Просмотр исходного кода

AMBARI-8434. Distribute Repositories/Install Components - server part (dlysnichenko)

Lisnichenko Dmitro 10 лет назад
Родитель
Commit
71cf447e6a
18 измененных файлов с 1140 добавлено и 85 удалено
  1. 16 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ClusterStackVersionService.java
  2. 2 4
      ambari-server/src/main/java/org/apache/ambari/server/api/services/HostService.java
  3. 31 5
      ambari-server/src/main/java/org/apache/ambari/server/api/services/HostStackVersionService.java
  4. 2 2
      ambari-server/src/main/java/org/apache/ambari/server/api/services/RequestService.java
  5. 13 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
  6. 37 21
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  7. 4 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
  8. 219 7
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
  9. 2 2
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
  10. 257 6
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java
  11. 9 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/spi/NoSuchParentResourceException.java
  12. 14 7
      ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
  13. 4 4
      ambari-server/src/test/java/org/apache/ambari/server/api/services/HostStackVersionServiceTest.java
  14. 36 1
      ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
  15. 243 0
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
  16. 217 0
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProviderTest.java
  17. 30 22
      ambari-server/src/test/python/custom_actions/TestInstallPackages.py
  18. 4 4
      ambari-server/src/test/python/custom_actions/configs/install_packages_config.json

+ 16 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/ClusterStackVersionService.java

@@ -21,6 +21,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import javax.ws.rs.GET;
+import javax.ws.rs.POST;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
@@ -97,6 +98,21 @@ public class ClusterStackVersionService extends BaseService {
     return new RepositoryVersionService(stackVersionProperties);
   }
 
+  /**
+   * Handles: POST /{clustername}/stack_versions requests
+   * Distribute repositories/install packages.
+   *
+   * @param body        http body
+   * @param headers     http headers
+   * @param ui          uri info
+   * @return information regarding the created services
+   */
+  @POST
+  @Produces("text/plain")
+  public Response createRequests(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, body, ui, Request.Type.POST, createResource(null));
+  }
+
   /**
    * Create a cluster stack version resource instance.
    *

+ 2 - 4
ambari-server/src/main/java/org/apache/ambari/server/api/services/HostService.java

@@ -249,7 +249,7 @@ public class HostService extends BaseService {
   public HostStackVersionService getHostStackVersionService(@Context javax.ws.rs.core.Request request,
       @PathParam("hostName") String hostName) {
 
-    return new HostStackVersionService(hostName);
+    return new HostStackVersionService(hostName, m_clusterName);
   }
 
   /**
@@ -264,11 +264,9 @@ public class HostService extends BaseService {
    * @return a host resource instance
    */
   ResourceInstance createHostResource(String clusterName, String hostName, UriInfo ui) {
-    boolean isAttached = ui.getRequestUri().toString().contains("/clusters/");
-
     Map<Resource.Type,String> mapIds = new HashMap<Resource.Type, String>();
     mapIds.put(Resource.Type.Host, hostName);
-    if (isAttached) {
+    if (clusterName != null) {
       mapIds.put(Resource.Type.Cluster, clusterName);
     }
 

+ 31 - 5
ambari-server/src/main/java/org/apache/ambari/server/api/services/HostStackVersionService.java

@@ -21,6 +21,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import javax.ws.rs.GET;
+import javax.ws.rs.POST;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
@@ -41,13 +42,19 @@ public class HostStackVersionService extends BaseService {
    */
   private String hostName;
 
+  /**
+   * Parent cluster name.
+   */
+  private String clusterName;
+
   /**
    * Constructor.
    *
    * @param hostName name of the host
    */
-  public HostStackVersionService(String hostName) {
+  public HostStackVersionService(String hostName, String clusterName) {
     this.hostName = hostName;
+    this.clusterName = clusterName;
   }
 
   /**
@@ -62,12 +69,12 @@ public class HostStackVersionService extends BaseService {
   @GET
   @Produces("text/plain")
   public Response getHostStackVersions(@Context HttpHeaders headers, @Context UriInfo ui) {
-    return handleRequest(headers, null, ui, Request.Type.GET, createResource(hostName, null));
+    return handleRequest(headers, null, ui, Request.Type.GET, createResource(ui, clusterName, hostName, null));
   }
 
   /**
    * Gets a single host stack version.
-   * Handles: GET /host/{hostname}/host_versions/{stackversionid} requests.
+   * Handles: GET /hosts/{hostname}/host_versions/{stackversionid} requests.
    *
    * @param headers        http headers
    * @param ui             uri info
@@ -80,7 +87,22 @@ public class HostStackVersionService extends BaseService {
   @Produces("text/plain")
   public Response getHostStackVersion(@Context HttpHeaders headers, @Context UriInfo ui,
       @PathParam("stackVersionId") String stackVersionId) {
-    return handleRequest(headers, null, ui, Request.Type.GET, createResource(hostName, stackVersionId));
+    return handleRequest(headers, null, ui, Request.Type.GET, createResource(ui, clusterName, hostName, stackVersionId));
+  }
+
+  /**
+   * Handles: POST /clusters/{clusterID}/hosts/{hostname}/host_versions requests
+   * Distribute repositories/install packages on host.
+   *
+   * @param body        http body
+   * @param headers     http headers
+   * @param ui          uri info
+   * @return information regarding the created services
+   */
+  @POST
+  @Produces("text/plain")
+  public Response createRequests(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, body, ui, Request.Type.POST, createResource(ui, clusterName, hostName, null));
   }
 
   /**
@@ -100,12 +122,16 @@ public class HostStackVersionService extends BaseService {
   /**
    * Create a host stack version resource instance.
    *
+   * @param clusterName
    * @param hostName host name
    * @param stackVersionId host stack version id
    * @return a host host version resource instance
    */
-  private ResourceInstance createResource(String hostName, String stackVersionId) {
+  private ResourceInstance createResource(UriInfo ui, String clusterName, String hostName, String stackVersionId) {
     final Map<Resource.Type, String> mapIds = new HashMap<Resource.Type, String>();
+    if (clusterName != null) {
+      mapIds.put(Resource.Type.Cluster, clusterName);
+    }
     mapIds.put(Resource.Type.Host, hostName);
     mapIds.put(Resource.Type.HostStackVersion, stackVersionId);
     return createResource(Resource.Type.HostStackVersion, mapIds);

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/api/services/RequestService.java

@@ -119,7 +119,7 @@ public class RequestService extends BaseService {
    * @param body        http body
    * @param headers     http headers
    * @param ui          uri info
-   * @return information regarding the created services
+   * @return information regarding the updated requests
    */
   @PUT
   @Path("{requestId}")
@@ -136,7 +136,7 @@ public class RequestService extends BaseService {
    * @param body        http body
    * @param headers     http headers
    * @param ui          uri info
-   * @return information regarding the created services
+   * @return information regarding the created requests
    */
   @POST
   @Produces("text/plain")

+ 13 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java

@@ -35,11 +35,14 @@ import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -718,5 +721,15 @@ public interface AmbariManagementController {
    * @throws AmbariException
    */
   public Set<StackConfigurationResponse> getStackLevelConfigurations(Set<StackLevelConfigurationRequest> requests) throws AmbariException;
+
+  /**
+   * @param serviceInfo service info for a given service
+   * @param hostParams parameter map. May be changed during method execution
+   * @param osFamily os family for host
+   * @return a full list of package dependencies for a service that should be
+   * installed on a host
+   */
+  List<ServiceOsSpecific.Package> getPackagesForServiceHost(ServiceInfo serviceInfo,
+                                                            Map<String, String> hostParams, String osFamily);
 }
 

+ 37 - 21
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java

@@ -1633,24 +1633,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     hostParams.put(REPO_INFO, repoInfo);
     hostParams.putAll(getRcaParameters());
 
-    // Write down os specific info for the service
-    ServiceOsSpecific anyOs = null;
-    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
-      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
-    }
-
-    ServiceOsSpecific hostOs = populateServicePackagesInfo(serviceInfo, hostParams, osFamily);
-
-    // Build package list that is relevant for host
     List<ServiceOsSpecific.Package> packages =
-      new ArrayList<ServiceOsSpecific.Package>();
-    if (anyOs != null) {
-      packages.addAll(anyOs.getPackages());
-    }
-
-    if (hostOs != null) {
-      packages.addAll(hostOs.getPackages());
-    }
+            getPackagesForServiceHost(serviceInfo, hostParams, osFamily);
     String packageList = gson.toJson(packages);
     hostParams.put(PACKAGE_LIST, packageList);
 
@@ -1684,12 +1668,21 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     execCmd.setRoleParams(roleParams);
   }
 
+  /**
+   * Computes os-dependent packages for service/host. Does not take into
+   * account package dependencies for ANY_OS. Instead of this method
+   * you should use getPackagesForServiceHost()
+   * because it takes into account both os-dependent and os-independent lists
+   * of packages for service.
+   * @param hostParams may be modified (appended SERVICE_REPO_INFO)
+   * @return a list of os-dependent packages for host
+   */
   protected ServiceOsSpecific populateServicePackagesInfo(ServiceInfo serviceInfo, Map<String, String> hostParams,
                                                         String osFamily) {
     ServiceOsSpecific hostOs = new ServiceOsSpecific(osFamily);
-    List<ServiceOsSpecific> foundedOSSpecifics = getOSSpecificsByFamily(serviceInfo.getOsSpecifics(), osFamily);
-    if (!foundedOSSpecifics.isEmpty()) {
-      for (ServiceOsSpecific osSpecific : foundedOSSpecifics) {
+    List<ServiceOsSpecific> foundOSSpecifics = getOSSpecificsByFamily(serviceInfo.getOsSpecifics(), osFamily);
+    if (!foundOSSpecifics.isEmpty()) {
+      for (ServiceOsSpecific osSpecific : foundOSSpecifics) {
         hostOs.addPackages(osSpecific.getPackages());
       }
       // Choose repo that is relevant for host
@@ -1699,10 +1692,33 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
       }
     }
-
     return hostOs;
   }
 
+  @Override
+  public List<ServiceOsSpecific.Package> getPackagesForServiceHost(ServiceInfo serviceInfo, Map<String, String> hostParams, String osFamily) {
+    // Write down os specific info for the service
+    ServiceOsSpecific anyOs = null;
+    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
+      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
+    }
+
+    ServiceOsSpecific hostOs = populateServicePackagesInfo(serviceInfo, hostParams, osFamily);
+
+    // Build package list that is relevant for host
+    List<ServiceOsSpecific.Package> packages =
+            new ArrayList<ServiceOsSpecific.Package>();
+    if (anyOs != null) {
+      packages.addAll(anyOs.getPackages());
+    }
+
+    if (hostOs != null) {
+      packages.addAll(hostOs.getPackages());
+    }
+
+    return packages;
+  }
+
   private List<ServiceOsSpecific> getOSSpecificsByFamily(Map<String, ServiceOsSpecific> osSpecifics, String osFamily) {
     List<ServiceOsSpecific> foundedOSSpecifics = new ArrayList<ServiceOsSpecific>();
     for (Entry<String, ServiceOsSpecific> osSpecific : osSpecifics.entrySet()) {

+ 4 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java

@@ -119,6 +119,10 @@ public abstract class AbstractControllerResourceProvider extends AbstractResourc
         return new StackResourceProvider(propertyIds, keyPropertyIds, managementController);
       case StackVersion:
         return new StackVersionResourceProvider(propertyIds, keyPropertyIds, managementController);
+      case ClusterStackVersion:
+        return new ClusterStackVersionResourceProvider(managementController);
+      case HostStackVersion:
+        return new HostStackVersionResourceProvider(managementController);
       case StackService:
         return new StackServiceResourceProvider(propertyIds, keyPropertyIds, managementController);
       case StackServiceComponent:

+ 219 - 7
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java

@@ -17,38 +17,64 @@
  */
 package org.apache.ambari.server.controller.internal;
 
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
+
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.server.AmbariException;
+import com.google.gson.Gson;
+import com.google.inject.Provider;
 import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.RequestFactory;
+import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.ActionExecutionContext;
+import org.apache.ambari.server.controller.AmbariActionExecutionHelper;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Resource.Type;
 import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
-import org.apache.ambari.server.controller.spi.Resource.Type;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
+import org.apache.ambari.server.orm.entities.RepositoryEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.RepositoryVersionState;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
+import org.apache.ambari.server.utils.StageUtils;
 
 import com.google.inject.Inject;
+import org.apache.ambari.server.state.StackId;
 
 /**
  * Resource provider for cluster stack versions resources.
  */
 @StaticallyInject
-public class ClusterStackVersionResourceProvider extends AbstractResourceProvider {
+public class ClusterStackVersionResourceProvider extends AbstractControllerResourceProvider {
 
   // ----- Property ID constants ---------------------------------------------
 
@@ -59,6 +85,13 @@ public class ClusterStackVersionResourceProvider extends AbstractResourceProvide
   protected static final String CLUSTER_STACK_VERSION_STATE_PROPERTY_ID                = PropertyHelper.getPropertyId("ClusterStackVersions", "state");
   protected static final String CLUSTER_STACK_VERSION_HOST_STATES_PROPERTY_ID          = PropertyHelper.getPropertyId("ClusterStackVersions", "host_states");
 
+  protected static final String STACK_VERSION_REPO_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("StackVersion", "repository_version");
+  protected static final String STACK_VERSION_STACK_PROPERTY_ID    = PropertyHelper.getPropertyId("StackVersion", "stack");
+  protected static final String STACK_VERSION_VERSION_PROPERTY_ID    = PropertyHelper.getPropertyId("StackVersion", "version");
+
+  protected static final String INSTALL_PACKAGES_ACTION = "install_packages";
+  protected static final String INSTALL_PACKAGES_FULL_NAME = "Distribute repositories/install packages";
+
   @SuppressWarnings("serial")
   private static Set<String> pkPropertyIds = new HashSet<String>() {
     {
@@ -66,6 +99,9 @@ public class ClusterStackVersionResourceProvider extends AbstractResourceProvide
       add(CLUSTER_STACK_VERSION_ID_PROPERTY_ID);
       add(CLUSTER_STACK_VERSION_STACK_PROPERTY_ID);
       add(CLUSTER_STACK_VERSION_VERSION_PROPERTY_ID);
+      add(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
+      add(STACK_VERSION_STACK_PROPERTY_ID);
+      add(STACK_VERSION_VERSION_PROPERTY_ID);
     }
   };
 
@@ -78,6 +114,9 @@ public class ClusterStackVersionResourceProvider extends AbstractResourceProvide
       add(CLUSTER_STACK_VERSION_VERSION_PROPERTY_ID);
       add(CLUSTER_STACK_VERSION_STATE_PROPERTY_ID);
       add(CLUSTER_STACK_VERSION_HOST_STATES_PROPERTY_ID);
+      add(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
+      add(STACK_VERSION_STACK_PROPERTY_ID);
+      add(STACK_VERSION_VERSION_PROPERTY_ID);
     }
   };
 
@@ -97,11 +136,26 @@ public class ClusterStackVersionResourceProvider extends AbstractResourceProvide
   @Inject
   private static HostVersionDAO hostVersionDAO;
 
+  @Inject
+  private static RepositoryVersionDAO repositoryVersionDAO;
+
+  private static Gson gson = StageUtils.getGson();
+
+  @Inject
+  private static Provider<AmbariActionExecutionHelper> actionExecutionHelper;
+
+  @Inject
+  private static StageFactory stageFactory;
+
+  @Inject
+  private static RequestFactory requestFactory;
+
   /**
    * Constructor.
    */
-  public ClusterStackVersionResourceProvider() {
-    super(propertyIds, keyPropertyIds);
+  public ClusterStackVersionResourceProvider(
+          AmbariManagementController managementController) {
+    super(propertyIds, keyPropertyIds, managementController);
   }
 
   @Override
@@ -159,11 +213,169 @@ public class ClusterStackVersionResourceProvider extends AbstractResourceProvide
 
   @Override
   public RequestStatus createResources(Request request) throws SystemException,
-      UnsupportedPropertyException, ResourceAlreadyExistsException,
-      NoSuchParentResourceException {
-    throw new SystemException("Method not supported");
+          UnsupportedPropertyException, ResourceAlreadyExistsException,
+          NoSuchParentResourceException {
+    Iterator<Map<String, Object>> iterator = request.getProperties().iterator();
+    String clName;
+    String desiredRepoVersion;
+    String stackName;
+    String stackVersion;
+    if (request.getProperties().size() != 1) {
+      throw new UnsupportedOperationException("Multiple requests cannot be executed at the same time.");
+    }
+
+    Map<String, Object> propertyMap = iterator.next();
+    if (!propertyMap.containsKey(CLUSTER_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID) ||
+            !propertyMap.containsKey(STACK_VERSION_REPO_VERSION_PROPERTY_ID)) {
+      throw new IllegalArgumentException(
+              String.format("%s or %s not defined",
+                      CLUSTER_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID,
+                      STACK_VERSION_REPO_VERSION_PROPERTY_ID));
+    }
+    clName = (String) propertyMap.get(CLUSTER_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID);
+    desiredRepoVersion = (String) propertyMap.get(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
+
+    Cluster cluster;
+    Map<String, Host> hostsForCluster;
+
+    AmbariManagementController managementController = getManagementController();
+    AmbariMetaInfo ami = managementController.getAmbariMetaInfo();
+    try {
+      cluster = managementController.getClusters().getCluster(clName);
+      hostsForCluster = managementController.getClusters().getHostsForCluster(clName);
+    } catch (AmbariException e) {
+      throw new NoSuchParentResourceException(e.getMessage(), e);
+    }
+
+    String stackId;
+    if (propertyMap.containsKey(STACK_VERSION_STACK_PROPERTY_ID) &&
+            propertyMap.containsKey(STACK_VERSION_VERSION_PROPERTY_ID)) {
+      stackName = (String) propertyMap.get(STACK_VERSION_STACK_PROPERTY_ID);
+      stackVersion = (String) propertyMap.get(STACK_VERSION_VERSION_PROPERTY_ID);
+      stackId = new StackId(stackName, stackVersion).getStackId();
+      if (! ami.isSupportedStack(stackName, stackVersion)) {
+        throw new NoSuchParentResourceException(String.format("Stack %s is not supported",
+                stackId));
+      }
+    } else { // Using stack that is current for cluster
+      StackId currentStackVersion = cluster.getCurrentStackVersion();
+      stackName = currentStackVersion.getStackName();
+      stackVersion = currentStackVersion.getStackVersion();
+      stackId = currentStackVersion.getStackId();
+    }
+
+    RepositoryVersionEntity repoVersionEnt = repositoryVersionDAO.findByStackAndVersion(stackId, desiredRepoVersion);
+    if (repoVersionEnt == null) {
+      throw new IllegalArgumentException(String.format(
+              "Repo version %s is not available for stack %s",
+              desiredRepoVersion, stackId));
+    }
+    List<OperatingSystemEntity> operatingSystems = repoVersionEnt.getOperatingSystems();
+    Map<String, List<RepositoryEntity>> perOsRepos = new HashMap<String, List<RepositoryEntity>>();
+    for (OperatingSystemEntity operatingSystem : operatingSystems) {
+      perOsRepos.put(operatingSystem.getOsType(), operatingSystem.getRepositories());
+    }
+
+    RequestStageContainer req = createRequest();
+    String stageName = String.format(INSTALL_PACKAGES_FULL_NAME);
+
+    Map<String, String> hostLevelParams = new HashMap<String, String>();
+    hostLevelParams.put(JDK_LOCATION, getManagementController().getJdkResourceUrl());
+
+    Stage stage = stageFactory.createNew(req.getId(),
+            "/tmp/ambari",
+            cluster.getClusterName(),
+            cluster.getClusterId(),
+            stageName,
+            "{}",
+            "{}",
+            StageUtils.getGson().toJson(hostLevelParams));
+
+    long stageId = req.getLastStageId() + 1;
+    if (0L == stageId) {
+      stageId = 1L;
+    }
+    stage.setStageId(stageId);
+    req.addStages(Collections.singletonList(stage));
+
+    for (Host host : hostsForCluster.values()) {
+      // Determine repositories for host
+      final List<RepositoryEntity> repoInfo = perOsRepos.get(host.getOsFamily());
+      if (repoInfo == null) {
+        throw new SystemException(String.format("Repositories for os type %s are " +
+                        "not defined. Repo version=%s, stackId=%s",
+                        host.getOsFamily(), desiredRepoVersion, stackId));
+      }
+      // For every host at cluster, determine packages for all installed services
+      List<ServiceOsSpecific.Package> packages = new ArrayList<ServiceOsSpecific.Package>();
+      Set<String> servicesOnHost = new HashSet<String>();
+      List<ServiceComponentHost> components = cluster.getServiceComponentHosts(host.getHostName());
+      for (ServiceComponentHost component : components) {
+        servicesOnHost.add(component.getServiceName());
+      }
+
+      for (String serviceName : servicesOnHost) {
+        ServiceInfo info;
+        try {
+          info = ami.getService(stackName, stackVersion, serviceName);
+        } catch (AmbariException e) {
+          throw new SystemException("Cannot enumerate services", e);
+        }
+
+        List<ServiceOsSpecific.Package> packagesForService = managementController.getPackagesForServiceHost(info,
+                new HashMap<String, String>(), // Contents are ignored
+                host.getOsFamily());
+        packages.addAll(packagesForService);
+      }
+      final String packageList = gson.toJson(packages);
+      final String repoList = gson.toJson(repoInfo);
+
+      Map<String, String> params = new HashMap<String, String>() {{
+        put("base_urls", repoList);
+        put("package_list", packageList);
+      }};
+
+      // add host to this stage
+      RequestResourceFilter filter = new RequestResourceFilter(null, null,
+              Collections.singletonList(host.getHostName()));
+
+      ActionExecutionContext actionContext = new ActionExecutionContext(
+              cluster.getClusterName(), INSTALL_PACKAGES_ACTION,
+              Collections.singletonList(filter),
+              params);
+      actionContext.setTimeout((short) 60);
+
+      try {
+        actionExecutionHelper.get().addExecutionCommandsToStage(actionContext, stage);
+      } catch (AmbariException e) {
+        throw new SystemException("Can not modify stage", e);
+      }
+    }
+
+    try {
+      req.persist();
+
+      //TODO: create cluster version entity
+      //clusterVersionDAO.create();
+    } catch (AmbariException e) {
+      throw new SystemException("Can not persist request", e);
+    }
+    return getRequestStatus(req.getRequestStatusResponse());
   }
 
+
+
+  private RequestStageContainer createRequest() {
+    ActionManager actionManager = getManagementController().getActionManager();
+
+    RequestStageContainer requestStages = new RequestStageContainer(
+            actionManager.getNextRequestId(), null, requestFactory, actionManager);
+    requestStages.setRequestContext(String.format(INSTALL_PACKAGES_FULL_NAME));
+
+    return requestStages;
+  }
+
+
   @Override
   public RequestStatus updateResources(Request request, Predicate predicate)
       throws SystemException, UnsupportedPropertyException,

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java

@@ -103,9 +103,9 @@ public class DefaultProviderModule extends AbstractProviderModule {
       case UpgradeItem:
         return new UpgradeItemResourceProvider(managementController);
       case ClusterStackVersion:
-        return new ClusterStackVersionResourceProvider();
+        return new ClusterStackVersionResourceProvider(managementController);
       case HostStackVersion:
-        return new HostStackVersionResourceProvider();
+        return new HostStackVersionResourceProvider(managementController);
       case Stage:
         return new StageResourceProvider();
       case OperatingSystem:

+ 257 - 6
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java

@@ -18,13 +18,26 @@
 package org.apache.ambari.server.controller.internal;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import com.google.gson.Gson;
+import com.google.inject.Provider;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.actionmanager.RequestFactory;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.ActionExecutionContext;
+import org.apache.ambari.server.controller.AmbariActionExecutionHelper;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
@@ -37,31 +50,57 @@ import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.spi.Resource.Type;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
+import org.apache.ambari.server.orm.entities.RepositoryEntity;
 import com.google.inject.Inject;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.utils.StageUtils;
+
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
 
 /**
  * Resource provider for host stack versions resources.
  */
 @StaticallyInject
-public class HostStackVersionResourceProvider extends AbstractResourceProvider {
+public class HostStackVersionResourceProvider extends AbstractControllerResourceProvider {
 
   // ----- Property ID constants ---------------------------------------------
 
   protected static final String HOST_STACK_VERSION_ID_PROPERTY_ID              = PropertyHelper.getPropertyId("HostStackVersions", "id");
+  protected static final String HOST_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID    = PropertyHelper.getPropertyId("HostStackVersions", "cluster_name");
   protected static final String HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID       = PropertyHelper.getPropertyId("HostStackVersions", "host_name");
   protected static final String HOST_STACK_VERSION_STACK_PROPERTY_ID           = PropertyHelper.getPropertyId("HostStackVersions", "stack");
   protected static final String HOST_STACK_VERSION_VERSION_PROPERTY_ID         = PropertyHelper.getPropertyId("HostStackVersions", "version");
   protected static final String HOST_STACK_VERSION_STATE_PROPERTY_ID           = PropertyHelper.getPropertyId("HostStackVersions", "state");
   protected static final String HOST_STACK_VERSION_REPOSITORIES_PROPERTY_ID    = PropertyHelper.getPropertyId("HostStackVersions", "repositories");
 
+  protected static final String STACK_VERSION_REPO_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("StackVersion", "repository_version");
+  protected static final String STACK_VERSION_STACK_PROPERTY_ID    = PropertyHelper.getPropertyId("StackVersion", "stack");
+  protected static final String STACK_VERSION_VERSION_PROPERTY_ID    = PropertyHelper.getPropertyId("StackVersion", "version");
+
+  protected static final String INSTALL_PACKAGES_ACTION = "install_packages";
+  protected static final String INSTALL_PACKAGES_FULL_NAME = "Distribute repositories/install packages";
+
+
   @SuppressWarnings("serial")
   private static Set<String> pkPropertyIds = new HashSet<String>() {
     {
+      add(HOST_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID);
       add(HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID);
       add(HOST_STACK_VERSION_ID_PROPERTY_ID);
       add(HOST_STACK_VERSION_STACK_PROPERTY_ID);
       add(HOST_STACK_VERSION_VERSION_PROPERTY_ID);
+      add(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
+      add(STACK_VERSION_STACK_PROPERTY_ID);
+      add(STACK_VERSION_VERSION_PROPERTY_ID);
     }
   };
 
@@ -69,17 +108,22 @@ public class HostStackVersionResourceProvider extends AbstractResourceProvider {
  // All property ids this provider understands, i.e. the union of the
  // read-only HostStackVersions/* fields and the StackVersion/* inputs
  // accepted on create.
  private static Set<String> propertyIds = new HashSet<String>() {
    {
      add(HOST_STACK_VERSION_ID_PROPERTY_ID);
      add(HOST_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID);
      add(HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID);
      add(HOST_STACK_VERSION_STACK_PROPERTY_ID);
      add(HOST_STACK_VERSION_VERSION_PROPERTY_ID);
      add(HOST_STACK_VERSION_STATE_PROPERTY_ID);
      add(HOST_STACK_VERSION_REPOSITORIES_PROPERTY_ID);
      add(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
      add(STACK_VERSION_STACK_PROPERTY_ID);
      add(STACK_VERSION_VERSION_PROPERTY_ID);
    }
  };
 
   @SuppressWarnings("serial")
   private static Map<Type, String> keyPropertyIds = new HashMap<Type, String>() {
     {
+      put(Resource.Type.Cluster, HOST_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID);
       put(Resource.Type.Host, HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID);
       put(Resource.Type.HostStackVersion, HOST_STACK_VERSION_ID_PROPERTY_ID);
       put(Resource.Type.Stack, HOST_STACK_VERSION_STACK_PROPERTY_ID);
@@ -90,11 +134,27 @@ public class HostStackVersionResourceProvider extends AbstractResourceProvider {
  // DAO for reading per-host stack version records.
  @Inject
  private static HostVersionDAO hostVersionDAO;

  // DAO used to resolve the requested repository version entity.
  @Inject
  private static RepositoryVersionDAO repositoryVersionDAO;

  // Shared Gson instance for serializing repo/package lists into
  // command parameters.
  private static Gson gson = StageUtils.getGson();

  // Factory for creating execution stages of the install_packages request.
  @Inject
  private static StageFactory stageFactory;

  // Factory for creating the request that wraps the stage(s).
  @Inject
  private static RequestFactory requestFactory;

  // Helper that expands the custom action into per-host execution commands.
  // NOTE(review): these are *static* injections — they require
  // requestStaticInjection()/injectMembers to be wired up; verify the Guice
  // module actually does this, otherwise they stay null at runtime.
  @Inject
  private static Provider<AmbariActionExecutionHelper> actionExecutionHelper;

+
  /**
   * Constructor.
   *
   * @param managementController the management controller used to look up
   *        clusters, hosts, stack metadata and the action manager
   */
  public HostStackVersionResourceProvider(
          AmbariManagementController managementController) {
    super(propertyIds, keyPropertyIds, managementController);
  }
 
   @Override
@@ -139,11 +199,202 @@ public class HostStackVersionResourceProvider extends AbstractResourceProvider {
     return resources;
   }
 
+
   @Override
   public RequestStatus createResources(Request request) throws SystemException,
-      UnsupportedPropertyException, ResourceAlreadyExistsException,
-      NoSuchParentResourceException {
-    throw new SystemException("Method not supported");
+          UnsupportedPropertyException, ResourceAlreadyExistsException,
+          NoSuchParentResourceException {
+    Iterator<Map<String,Object>> iterator = request.getProperties().iterator();
+    String hostName;
+    String desiredRepoVersion;
+    String stackName;
+    String stackVersion;
+    if (request.getProperties().size() != 1) {
+      throw new UnsupportedOperationException("Multiple requests cannot be executed at the same time.");
+    }
+
+    Map<String, Object> propertyMap  = iterator.next();
+
+    Set<String> requiredProperties = new HashSet<String>(){{
+      add(HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID);
+      add(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
+      add(STACK_VERSION_STACK_PROPERTY_ID);
+      add(STACK_VERSION_VERSION_PROPERTY_ID);
+    }};
+
+    for (String requiredProperty : requiredProperties) {
+      if (! propertyMap.containsKey(requiredProperty)) {
+        throw new IllegalArgumentException(
+                String.format("The required property %s is not defined",
+                        requiredProperty));
+      }
+    }
+    String clName = (String) propertyMap.get(HOST_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID);
+    hostName = (String) propertyMap.get(HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID);
+    desiredRepoVersion = (String) propertyMap.get(STACK_VERSION_REPO_VERSION_PROPERTY_ID);
+
+    Host host;
+    try {
+      host = getManagementController().getClusters().getHost(hostName);
+    } catch (AmbariException e) {
+      throw new NoSuchParentResourceException(
+              String.format("Can not find host %s", hostName), e);
+    }
+    AmbariManagementController managementController = getManagementController();
+    AmbariMetaInfo ami = managementController.getAmbariMetaInfo();
+
+    stackName = (String) propertyMap.get(STACK_VERSION_STACK_PROPERTY_ID);
+    stackVersion = (String) propertyMap.get(STACK_VERSION_VERSION_PROPERTY_ID);
+    String stackId = new StackId(stackName, stackVersion).getStackId();
+    if (!ami.isSupportedStack(stackName, stackVersion)) {
+      throw new NoSuchParentResourceException(String.format("Stack %s is not supported",
+              stackId));
+    }
+
+    Set<Cluster> clusterSet;
+    if (clName == null) {
+      try {
+        clusterSet = getManagementController().getClusters().getClustersForHost(hostName);
+      } catch (AmbariException e) {
+        throw new NoSuchParentResourceException(String.format((
+                "Host %s does belong to any cluster"
+        ), hostName), e);
+      }
+    } else {
+      Cluster cluster;
+      try {
+        cluster = getManagementController().getClusters().getCluster(clName);
+      } catch (AmbariException e) {
+        throw new NoSuchParentResourceException(String.format((
+                "Cluster %s does not exist"
+        ), clName), e);
+      }
+      clusterSet = Collections.singleton(cluster);
+    }
+
+    // Select all clusters that contain the desired repo version
+    Set<Cluster> selectedClusters = new HashSet<Cluster>();
+    for (Cluster cluster : clusterSet) {
+      if(cluster.getCurrentStackVersion().getStackId().equals(stackId)) {
+        selectedClusters.add(cluster);
+      }
+    }
+
+    Cluster cluster;
+    if (selectedClusters.size() != 1) {
+      throw new UnsupportedOperationException(String.format("Host %s belongs to %d clusters " +
+              "with stack id %s. Performing %s action on multiple clusters " +
+              "is not supported", hostName, selectedClusters.size(), stackId, INSTALL_PACKAGES_FULL_NAME));
+    } else {
+      cluster = selectedClusters.iterator().next();
+    }
+
+    RepositoryVersionEntity repoVersionEnt = repositoryVersionDAO.findByStackAndVersion(stackId, desiredRepoVersion);
+    if (repoVersionEnt==null) {
+      throw new IllegalArgumentException(String.format(
+              "Repo version %s is not available for stack %s",
+              desiredRepoVersion, stackId));
+    }
+
+    List<OperatingSystemEntity> operatingSystems = repoVersionEnt.getOperatingSystems();
+    Map<String, List<RepositoryEntity>> perOsRepos = new HashMap<String, List<RepositoryEntity>>();
+    for (OperatingSystemEntity operatingSystem : operatingSystems) {
+      perOsRepos.put(operatingSystem.getOsType(), operatingSystem.getRepositories());
+    }
+
+    // Determine repositories for host
+    final List<RepositoryEntity> repoInfo = perOsRepos.get(host.getOsFamily());
+    if (repoInfo == null) {
+      throw new SystemException(String.format("Repositories for os type %s are " +
+                      "not defined. Repo version=%s, stackId=%s",
+              host.getOsFamily(), desiredRepoVersion, stackId));
+    }
+    // For every host at cluster, determine packages for all installed services
+    List<ServiceOsSpecific.Package> packages = new ArrayList<ServiceOsSpecific.Package>();
+    Set<String> servicesOnHost = new HashSet<String>();
+    List<ServiceComponentHost> components = cluster.getServiceComponentHosts(host.getHostName());
+    for (ServiceComponentHost component : components) {
+      servicesOnHost.add(component.getServiceName());
+    }
+
+    for (String serviceName : servicesOnHost) {
+      ServiceInfo info;
+      try {
+        info = ami.getService(stackName, stackVersion, serviceName);
+      } catch (AmbariException e) {
+        throw new SystemException("Can not enumerate services", e);
+      }
+      List<ServiceOsSpecific.Package> packagesForService = managementController.getPackagesForServiceHost(info,
+              new HashMap<String, String>(), // Contents are ignored
+              host.getOsFamily());
+      packages.addAll(packagesForService);
+    }
+    final String packageList = gson.toJson(packages);
+    final String repoList = gson.toJson(repoInfo);
+
+    Map<String, String> params = new HashMap<String, String>(){{
+      put("base_urls", repoList);
+      put("package_list", packageList);
+    }};
+
+    // Create custom action
+    RequestResourceFilter filter = new RequestResourceFilter(null, null,
+            Collections.singletonList(hostName));
+
+    ActionExecutionContext actionContext = new ActionExecutionContext(
+            cluster.getClusterName(), INSTALL_PACKAGES_ACTION,
+            Collections.singletonList(filter),
+            params);
+    actionContext.setTimeout((short) 60);
+
+    String caption = String.format(INSTALL_PACKAGES_FULL_NAME + " on host %s", hostName);
+    RequestStageContainer req = createRequest(caption);
+
+    Map<String, String> hostLevelParams = new HashMap<String, String>();
+    hostLevelParams.put(JDK_LOCATION, getManagementController().getJdkResourceUrl());
+
+    Stage stage = stageFactory.createNew(req.getId(),
+            "/tmp/ambari",
+            cluster.getClusterName(),
+            cluster.getClusterId(),
+            caption,
+            "{}",
+            "{}",
+            StageUtils.getGson().toJson(hostLevelParams));
+
+    long stageId = req.getLastStageId() + 1;
+    if (0L == stageId) {
+      stageId = 1L;
+    }
+    stage.setStageId(stageId);
+    req.addStages(Collections.singletonList(stage));
+
+    try {
+      actionExecutionHelper.get().addExecutionCommandsToStage(actionContext, stage);
+    } catch (AmbariException e) {
+      throw new SystemException("Can not modify stage", e);
+    }
+
+    try {
+      req.persist();
+
+      //TODO: create cluster version entity
+      //clusterVersionDAO.create();
+    } catch (AmbariException e) {
+      throw new SystemException("Can not persist request", e);
+    }
+    return getRequestStatus(req.getRequestStatusResponse());
+  }
+
+
+  private RequestStageContainer createRequest(String caption) {
+    ActionManager actionManager = getManagementController().getActionManager();
+
+    RequestStageContainer requestStages = new RequestStageContainer(
+            actionManager.getNextRequestId(), null, requestFactory, actionManager);
+    requestStages.setRequestContext(String.format(caption));
+
+    return requestStages;
   }
 
   @Override

+ 9 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/spi/NoSuchParentResourceException.java

@@ -32,4 +32,13 @@ public class NoSuchParentResourceException extends Exception {
   public NoSuchParentResourceException(String msg, Throwable throwable) {
     super(msg, throwable);
   }
+
  /**
   * Constructor.
   *
   * @param msg  the detail message describing which parent resource could
   *             not be found
   */
  public NoSuchParentResourceException(String msg) {
    super(msg);
  }
 }

+ 14 - 7
ambari-server/src/main/resources/custom_actions/scripts/install_packages.py

@@ -44,8 +44,15 @@ class InstallPackages(Script):
 
     # Parse parameters
     config = Script.get_config()
-    base_urls = json.loads(config['commandParams']['base_urls'])
-    package_list = json.loads(config['commandParams']['package_list'])
+
+    # Select dict that contains parameters
+    try:
+      base_urls = json.loads(config['roleParams']['base_urls'])
+      package_list = json.loads(config['roleParams']['package_list'])
+    except KeyError:
+      # Last try
+      base_urls = json.loads(config['commandParams']['base_urls'])
+      package_list = json.loads(config['commandParams']['package_list'])
 
     # Install/update repositories
     installed_repositories = []
@@ -63,7 +70,7 @@ class InstallPackages(Script):
     if not delayed_fail:
       try:
         for package in package_list:
-          Package(package)
+          Package(package['name'])
         package_install_result = True
       except Exception, err:
         print "Can not install packages."
@@ -87,18 +94,18 @@ class InstallPackages(Script):
     template = "repo_suse_rhel.j2" if OSCheck.is_redhat_family() or OSCheck.is_suse_family() else "repo_ubuntu.j2"
 
     repo = {
-      'repoName': url_info['id']
+      'repoName': url_info['repositoryId']
     }
 
-    if not 'baseurl' in url_info:
+    if not 'baseUrl' in url_info:
       repo['baseurl'] = None
     else:
-      repo['baseurl'] = url_info['baseurl']
+      repo['baseurl'] = url_info['baseUrl']
 
     if not 'mirrorsList' in url_info:
       repo['mirrorsList'] = None
     else:
-      repo['mirrorsList'] = url_info['mirrorslist']
+      repo['mirrorsList'] = url_info['mirrorsList']
 
     ubuntu_components = [repo['repoName']] + self.UBUNTU_REPO_COMPONENTS_POSTFIX
 

+ 4 - 4
ambari-server/src/test/java/org/apache/ambari/server/api/services/HostStackVersionServiceTest.java

@@ -44,13 +44,13 @@ public class HostStackVersionServiceTest extends BaseServiceTest {
     Object[] args;
 
     //getHostStackVersions
-    hostStackVersionService = new TestHostStackVersionService("host");
+    hostStackVersionService = new TestHostStackVersionService("host", "cluster");
     m = hostStackVersionService.getClass().getMethod("getHostStackVersions", HttpHeaders.class, UriInfo.class);
     args = new Object[] {getHttpHeaders(), getUriInfo()};
     listInvocations.add(new ServiceTestInvocation(Request.Type.GET, hostStackVersionService, m, args, null));
 
     //getHostStackVersion
-    hostStackVersionService = new TestHostStackVersionService("host");
+    hostStackVersionService = new TestHostStackVersionService("host", "cluster");
     m = hostStackVersionService.getClass().getMethod("getHostStackVersion", HttpHeaders.class, UriInfo.class, String.class);
     args = new Object[] {getHttpHeaders(), getUriInfo(), "1"};
     listInvocations.add(new ServiceTestInvocation(Request.Type.GET, hostStackVersionService, m, args, null));
@@ -59,8 +59,8 @@ public class HostStackVersionServiceTest extends BaseServiceTest {
   }
 
   private class TestHostStackVersionService extends HostStackVersionService {
-    public TestHostStackVersionService(String hostName) {
-      super(hostName);
+    public TestHostStackVersionService(String hostName, String clusterName) {
+      super(hostName, clusterName);
     }
 
     @Override

+ 36 - 1
ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java

@@ -29,9 +29,10 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-
+import static org.hamcrest.CoreMatchers.*;
 import java.io.File;
 import java.io.IOException;
 import java.io.StringReader;
@@ -111,6 +112,8 @@ import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
@@ -10342,6 +10345,38 @@ public class AmbariManagementControllerTest {
 
   }
 
+  @Test
+  public void testGetPackagesForServiceHost() throws Exception {
+    ServiceInfo service = ambariMetaInfo.getStack("HDP", "2.0.1").getService("HIVE");
+    HashMap<String, String> hostParams = new HashMap<String, String>();
+
+    Map<String, ServiceOsSpecific.Package> packages = new HashMap<String, ServiceOsSpecific.Package>();
+    String [] packageNames = {"hive", "mysql-connector-java", "mysql", "mysql-server", "mysql-client"};
+    for (String packageName : packageNames) {
+      ServiceOsSpecific.Package pkg = new ServiceOsSpecific.Package();
+      pkg.setName(packageName);
+      packages.put(packageName, pkg);
+    }
+
+    List<ServiceOsSpecific.Package> rhel5Packages = controller.getPackagesForServiceHost(service, hostParams, "redhat5");
+    List<ServiceOsSpecific.Package> expectedRhel5 = Arrays.asList(
+            packages.get("hive"),
+            packages.get("mysql-connector-java"),
+            packages.get("mysql"),
+            packages.get("mysql-server")
+    );
+
+    List<ServiceOsSpecific.Package> sles11Packages = controller.getPackagesForServiceHost(service, hostParams, "suse11");
+    List<ServiceOsSpecific.Package> expectedSles11 = Arrays.asList(
+            packages.get("hive"),
+            packages.get("mysql-connector-java"),
+            packages.get("mysql"),
+            packages.get("mysql-client")
+    );
+    assertThat(rhel5Packages, is(expectedRhel5));
+    assertThat(sles11Packages, is(expectedSles11));
+  }
+
   // this is a temporary measure as a result of moving updateHostComponents from AmbariManagementController
   // to HostComponentResourceProvider.  Eventually the tests should be moved out of this class.
   private RequestStatusResponse updateHostComponents(Set<ServiceComponentHostRequest> requests,

+ 243 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java

@@ -0,0 +1,243 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import com.google.inject.util.Modules;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.agent.AgentEnv;
+import org.apache.ambari.server.agent.DiskInfo;
+import org.apache.ambari.server.agent.HostInfo;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
+import org.apache.ambari.server.controller.HostResponse;
+import org.apache.ambari.server.controller.RequestStatusResponse;
+import org.apache.ambari.server.controller.ResourceProviderFactory;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.metadata.ActionMetadata;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.AgentVersion;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.HostConfig;
+import org.apache.ambari.server.state.HostEvent;
+import org.apache.ambari.server.state.HostHealthStatus;
+import org.apache.ambari.server.state.HostState;
+import org.apache.ambari.server.state.MaintenanceState;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyLong;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * ClusterStackVersionResourceProvider tests.
+ */
+//@RunWith(PowerMockRunner.class)
+@PrepareForTest(AmbariManagementControllerImpl.class)
+public class ClusterStackVersionResourceProviderTest {
+
+  private Injector injector;
+  private AmbariMetaInfo ambariMetaInfo;
+  private RepositoryVersionDAO repositoryVersionDAOMock;
+
+  private String operatingSystemsJson = "[\n" +
+          "   {\n" +
+          "      \"repositories\":[\n" +
+          "         {\n" +
+          "            \"Repositories/base_url\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\n" +
+          "            \"Repositories/repo_name\":\"HDP-UTILS\",\n" +
+          "            \"Repositories/repo_id\":\"HDP-UTILS-1.1.0.20\"\n" +
+          "         },\n" +
+          "         {\n" +
+          "            \"Repositories/base_url\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\n" +
+          "            \"Repositories/repo_name\":\"HDP\",\n" +
+          "            \"Repositories/repo_id\":\"HDP-2.2\"\n" +
+          "         }\n" +
+          "      ],\n" +
+          "      \"OperatingSystems/os_type\":\"redhat6\"\n" +
+          "   }\n" +
+          "]";
+
+  @Before
+  public void setup() throws Exception {
+    // Create instances of mocks
+    repositoryVersionDAOMock = createNiceMock(RepositoryVersionDAO.class);
+    // Initialize injector
+    InMemoryDefaultTestModule module = new InMemoryDefaultTestModule();
+    injector = Guice.createInjector(Modules.override(module).with(new MockModule()));
+    injector.getInstance(GuiceJpaInitializer.class);
+    ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    ambariMetaInfo.init();
+  }
+
+  @After
+  public void teardown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+
+  @Test
+  public void testCreateResources() throws Exception {
+    Resource.Type type = Resource.Type.ClusterStackVersion;
+
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    StackId stackId = new StackId("HDP", "2.0.1");
+
+    final Host host1 = createNiceMock("host1", Host.class);
+    final Host host2 = createNiceMock("host2", Host.class);
+    expect(host1.getHostName()).andReturn("host1").anyTimes();
+    expect(host1.getOsFamily()).andReturn("redhat6").anyTimes();
+    expect(host2.getHostName()).andReturn("host2").anyTimes();
+    expect(host2.getOsFamily()).andReturn("redhat6").anyTimes();
+    replay(host1, host2);
+    Map<String, Host> hostsForCluster = new HashMap<String, Host>() {{
+      put(host1.getHostName(), host1);
+      put(host2.getHostName(), host2);
+    }};
+
+    ServiceComponentHost sch = createMock(ServiceComponentHost.class);
+    List<ServiceComponentHost> schs = Collections.singletonList(sch);
+
+    RepositoryVersionEntity repoVersion = new RepositoryVersionEntity();
+    repoVersion.setOperatingSystems(operatingSystemsJson);
+
+    ServiceOsSpecific.Package hivePackage = new ServiceOsSpecific.Package();
+    hivePackage.setName("hive");
+    List<ServiceOsSpecific.Package> packages = Collections.singletonList(hivePackage);
+
+    ActionManager actionManager = createNiceMock(ActionManager.class);
+
+    RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
+
+    AbstractControllerResourceProvider.init(resourceProviderFactory);
+
+    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+    expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+    expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
+    expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
+    expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
+            (Map<String, String>) anyObject(List.class), anyObject(String.class))).andReturn(packages).anyTimes();
+
+    expect(resourceProviderFactory.getHostResourceProvider(anyObject(Set.class), anyObject(Map.class),
+        eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+
+    expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
+    expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(hostsForCluster);
+
+    expect(cluster.getCurrentStackVersion()).andReturn(stackId);
+    expect(cluster.getServiceComponentHosts(anyObject(String.class))).andReturn(schs).anyTimes();
+
+    expect(sch.getServiceName()).andReturn("HIVE").anyTimes();
+
+    expect(repositoryVersionDAOMock.findByStackAndVersion(anyObject(String.class),
+            anyObject(String.class))).andReturn(repoVersion);
+
+    expect(actionManager.getRequestTasks(anyLong())).andReturn(Collections.<HostRoleCommand>emptyList()).anyTimes();
+
+    // replay
+    replay(managementController, response, clusters, resourceProviderFactory, csvResourceProvider,
+            cluster, repositoryVersionDAOMock, sch, actionManager);
+
+    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
+        type,
+        PropertyHelper.getPropertyIds(type),
+        PropertyHelper.getKeyPropertyIds(type),
+        managementController);
+
+    injector.injectMembers(provider);
+
+    // add the property map to a set for the request.  add more maps for multiple creates
+    Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    // add properties to the request map
+    properties.put(ClusterStackVersionResourceProvider.CLUSTER_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(ClusterStackVersionResourceProvider.STACK_VERSION_REPO_VERSION_PROPERTY_ID, "2.2.0.1-885");
+
+    propertySet.add(properties);
+
+    // create the request
+    Request request = PropertyHelper.getCreateRequest(propertySet, null);
+
+    provider.createResources(request);
+
+    // verify
+    verify(managementController, response, clusters);
+  }
+
+  public class MockModule extends AbstractModule {
+    @Override
+    protected void configure() {
+      bind(RepositoryVersionDAO.class).toInstance(repositoryVersionDAOMock);
+    }
+  }
+}

+ 217 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProviderTest.java

@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import com.google.inject.util.Modules;
+import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
+import org.apache.ambari.server.controller.RequestStatusResponse;
+import org.apache.ambari.server.controller.ResourceProviderFactory;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyLong;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+/**
+ * HostStackVersionResourceProvider tests.
+ */
+//@RunWith(PowerMockRunner.class)
+@PrepareForTest(AmbariManagementControllerImpl.class)
+public class HostStackVersionResourceProviderTest {
+
+  private Injector injector;
+  private AmbariMetaInfo ambariMetaInfo;
+  // Mocked DAO, bound into the injector via MockModule so the provider under
+  // test resolves repository versions from this mock instead of the database.
+  private RepositoryVersionDAO repositoryVersionDAOMock;
+
+  // Canned operating-systems payload for the RepositoryVersionEntity: a single
+  // redhat6 OS entry with two repositories (HDP-UTILS and HDP).
+  private String operatingSystemsJson = "[\n" +
+          "   {\n" +
+          "      \"repositories\":[\n" +
+          "         {\n" +
+          "            \"Repositories/base_url\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\n" +
+          "            \"Repositories/repo_name\":\"HDP-UTILS\",\n" +
+          "            \"Repositories/repo_id\":\"HDP-UTILS-1.1.0.20\"\n" +
+          "         },\n" +
+          "         {\n" +
+          "            \"Repositories/base_url\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\n" +
+          "            \"Repositories/repo_name\":\"HDP\",\n" +
+          "            \"Repositories/repo_id\":\"HDP-2.2\"\n" +
+          "         }\n" +
+          "      ],\n" +
+          "      \"OperatingSystems/os_type\":\"redhat6\"\n" +
+          "   }\n" +
+          "]";
+
+  @Before
+  public void setup() throws Exception {
+    // Create instances of mocks
+    repositoryVersionDAOMock = createNiceMock(RepositoryVersionDAO.class);
+    // Initialize injector; MockModule overrides the default DAO binding
+    InMemoryDefaultTestModule module = new InMemoryDefaultTestModule();
+    injector = Guice.createInjector(Modules.override(module).with(new MockModule()));
+    injector.getInstance(GuiceJpaInitializer.class);
+    ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    ambariMetaInfo.init();
+  }
+
+  @After
+  public void teardown() {
+    // Stop the in-memory persistence service started by GuiceJpaInitializer
+    injector.getInstance(PersistService.class).stop();
+  }
+
+
+  /**
+   * Creating a HostStackVersion resource for a single host should complete
+   * without throwing; cluster, host, DAO and action-manager collaborators are
+   * all mocked, so only the provider's create path is exercised.
+   */
+  @Test
+  public void testCreateResources() throws Exception {
+    Resource.Type type = Resource.Type.HostStackVersion;
+
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    StackId stackId = new StackId("HDP", "2.0.1");
+
+    final Host host1 = createNiceMock("host1", Host.class);
+    expect(host1.getHostName()).andReturn("host1").anyTimes();
+    expect(host1.getOsFamily()).andReturn("redhat6").anyTimes();
+    replay(host1);
+    Map<String, Host> hostsForCluster = new HashMap<String, Host>() {{
+      put(host1.getHostName(), host1);
+    }};
+
+    ServiceComponentHost sch = createMock(ServiceComponentHost.class);
+    List<ServiceComponentHost> schs = Collections.singletonList(sch);
+
+    RepositoryVersionEntity repoVersion = new RepositoryVersionEntity();
+    repoVersion.setOperatingSystems(operatingSystemsJson);
+
+    ServiceOsSpecific.Package hivePackage = new ServiceOsSpecific.Package();
+    hivePackage.setName("hive");
+    List<ServiceOsSpecific.Package> packages = Collections.singletonList(hivePackage);
+
+    ActionManager actionManager = createNiceMock(ActionManager.class);
+
+    RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
+
+    AbstractControllerResourceProvider.init(resourceProviderFactory);
+
+    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+    expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+    expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
+    expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
+    // NOTE(review): the (Map<String, String>) cast on anyObject(List.class) looks
+    // like an EasyMock overload-selection trick — confirm it matches the actual
+    // getPackagesForServiceHost parameter type.
+    expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
+            (Map<String, String>) anyObject(List.class), anyObject(String.class))).andReturn(packages).anyTimes();
+
+    expect(resourceProviderFactory.getHostResourceProvider(anyObject(Set.class), anyObject(Map.class),
+        eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+
+    expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
+    expect(clusters.getHost(anyObject(String.class))).andReturn(host1);
+
+    expect(cluster.getCurrentStackVersion()).andReturn(stackId);
+    expect(cluster.getServiceComponentHosts(anyObject(String.class))).andReturn(schs).anyTimes();
+
+    expect(sch.getServiceName()).andReturn("HIVE").anyTimes();
+
+    expect(repositoryVersionDAOMock.findByStackAndVersion(anyObject(String.class),
+            anyObject(String.class))).andReturn(repoVersion);
+
+    expect(actionManager.getRequestTasks(anyLong())).andReturn(Collections.<HostRoleCommand>emptyList()).anyTimes();
+
+    // replay
+    replay(managementController, response, clusters, resourceProviderFactory, csvResourceProvider,
+            cluster, repositoryVersionDAOMock, sch, actionManager);
+
+    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
+        type,
+        PropertyHelper.getPropertyIds(type),
+        PropertyHelper.getKeyPropertyIds(type),
+        managementController);
+
+    // Inject the mocked RepositoryVersionDAO (and any other bindings) into the provider
+    injector.injectMembers(provider);
+
+    // add the property map to a set for the request.  add more maps for multiple creates
+    Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    // add properties to the request map
+    properties.put(HostStackVersionResourceProvider.HOST_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(HostStackVersionResourceProvider.STACK_VERSION_REPO_VERSION_PROPERTY_ID, "2.2.0.1-885");
+    properties.put(HostStackVersionResourceProvider.STACK_VERSION_STACK_PROPERTY_ID, "HDP");
+    properties.put(HostStackVersionResourceProvider.STACK_VERSION_VERSION_PROPERTY_ID, "2.0.1");
+    properties.put(HostStackVersionResourceProvider.HOST_STACK_VERSION_HOST_NAME_PROPERTY_ID, "host1");
+
+    propertySet.add(properties);
+
+    // create the request
+    Request request = PropertyHelper.getCreateRequest(propertySet, null);
+
+    provider.createResources(request);
+
+    // verify (NOTE(review): only a subset of the replayed mocks is verified;
+    // strict expectations on sch/actionManager/repositoryVersionDAOMock are not checked)
+    verify(managementController, response, clusters);
+  }
+
+  /**
+   * Guice override module that substitutes the mocked RepositoryVersionDAO
+   * for the production binding.
+   */
+  public class MockModule extends AbstractModule {
+    @Override
+    protected void configure() {
+      bind(RepositoryVersionDAO.class).toInstance(repositoryVersionDAOMock);
+    }
+  }
+}

+ 30 - 22
ambari-server/src/test/python/custom_actions/TestInstallPackages.py

@@ -43,25 +43,29 @@ class TestInstallPackages(RMFTestCase):
     self.assertEquals(put_structured_out.call_args[0][0],
                       {'package_installation_result': 'SUCCESS',
                        'ambari_repositories': []})
-    self.assertResourceCalled('Repository', 'HDP-2.2.0.0-885',
-                              base_url=u'http://host1/hdp',
+    self.assertResourceCalled('Repository', 'HDP-UTILS-1.1.0.20',
+                              base_url='http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0',
                               action=['create'],
-                              components=[u'HDP-2.2.0.0-885', 'main'],
+                              components=[u'HDP-UTILS-1.1.0.20', 'main'],
                               repo_template='repo_suse_rhel.j2',
-                              repo_file_name=u'HDP-2.2.0.0-885',
+                              repo_file_name='HDP-UTILS-1.1.0.20',
                               mirror_list=None,
     )
-    self.assertResourceCalled('Repository', 'HDP-UTILS-1.0.0.20',
-                              base_url=u'http://host1/hdp-utils',
+    self.assertResourceCalled('Repository', 'HDP-2.2',
+                              base_url='http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0',
                               action=['create'],
-                              components=[u'HDP-UTILS-1.0.0.20', 'main'],
+                              components=[u'HDP-2.2', 'main'],
                               repo_template='repo_suse_rhel.j2',
-                              repo_file_name=u'HDP-UTILS-1.0.0.20',
+                              repo_file_name='HDP-2.2',
                               mirror_list=None,
     )
-    self.assertResourceCalled('Package', 'python-rrdtool-1.4.5', )
-    self.assertResourceCalled('Package', 'libganglia-3.5.0-99', )
-    self.assertResourceCalled('Package', 'ganglia-*', )
+    self.assertResourceCalled('Package', 'hadoop_2_2_*',)
+    self.assertResourceCalled('Package', 'snappy',)
+    self.assertResourceCalled('Package', 'snappy-devel',)
+    self.assertResourceCalled('Package', 'lzo',)
+    self.assertResourceCalled('Package', 'hadooplzo_2_2_*',)
+    self.assertResourceCalled('Package', 'hadoop_2_2_*-libhdfs',)
+    self.assertResourceCalled('Package', 'ambari-log4j',)
     self.assertNoMoreResources()
 
 
@@ -80,25 +84,29 @@ class TestInstallPackages(RMFTestCase):
     self.assertEquals(put_structured_out.call_args[0][0],
                       {'package_installation_result': 'SUCCESS',
                        'ambari_repositories': ["HDP-UTILS-1.0.0.20"]})
-    self.assertResourceCalled('Repository', 'HDP-2.2.0.0-885',
-                              base_url=u'http://host1/hdp',
+    self.assertResourceCalled('Repository', 'HDP-UTILS-1.1.0.20',
+                              base_url='http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0',
                               action=['create'],
-                              components=[u'HDP-2.2.0.0-885', 'main'],
+                              components=[u'HDP-UTILS-1.1.0.20', 'main'],
                               repo_template='repo_suse_rhel.j2',
-                              repo_file_name=u'HDP-2.2.0.0-885',
+                              repo_file_name='HDP-UTILS-1.1.0.20',
                               mirror_list=None,
     )
-    self.assertResourceCalled('Repository', 'HDP-UTILS-1.0.0.20',
-                              base_url=u'http://host1/hdp-utils',
+    self.assertResourceCalled('Repository', 'HDP-2.2',
+                              base_url='http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0',
                               action=['create'],
-                              components=[u'HDP-UTILS-1.0.0.20', 'main'],
+                              components=[u'HDP-2.2', 'main'],
                               repo_template='repo_suse_rhel.j2',
-                              repo_file_name=u'HDP-UTILS-1.0.0.20',
+                              repo_file_name='HDP-2.2',
                               mirror_list=None,
     )
-    self.assertResourceCalled('Package', 'python-rrdtool-1.4.5', )
-    self.assertResourceCalled('Package', 'libganglia-3.5.0-99', )
-    self.assertResourceCalled('Package', 'ganglia-*', )
+    self.assertResourceCalled('Package', 'hadoop_2_2_*',)
+    self.assertResourceCalled('Package', 'snappy',)
+    self.assertResourceCalled('Package', 'snappy-devel',)
+    self.assertResourceCalled('Package', 'lzo',)
+    self.assertResourceCalled('Package', 'hadooplzo_2_2_*',)
+    self.assertResourceCalled('Package', 'hadoop_2_2_*-libhdfs',)
+    self.assertResourceCalled('Package', 'ambari-log4j',)
     self.assertNoMoreResources()
 
 

+ 4 - 4
ambari-server/src/test/python/custom_actions/configs/install_packages_config.json

@@ -22,8 +22,8 @@
     }, 
     "commandType": "EXECUTION_COMMAND", 
     "roleParams": {
-        "base_urls": "[{\"id\": \"HDP-2.2.0.0-885\", \"type\": \"HDP\",\"baseurl\": \"http://host1/hdp\"}, {\"id\": \"HDP-UTILS-1.0.0.20\", \"type\": \"HDP-UTILS\", \"baseurl\": \"http://host1/hdp-utils\"}]", 
-        "package_list": "[\"python-rrdtool-1.4.5\", \"libganglia-3.5.0-99\", \"ganglia-*\"]"
+        "base_urls": "[{\"name\":\"HDP-UTILS\",\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-UTILS-1.1.0.20\"},{\"name\":\"HDP\",\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-2.2\"}]",
+        "package_list": "[{\"name\":\"hadoop_2_2_*\"},{\"name\":\"snappy\"},{\"name\":\"snappy-devel\"},{\"name\":\"lzo\"},{\"name\":\"hadooplzo_2_2_*\"},{\"name\":\"hadoop_2_2_*-libhdfs\"},{\"name\":\"ambari-log4j\"}]"
     }, 
     "serviceName": "null", 
     "role": "install_packages", 
@@ -34,8 +34,8 @@
     "commandParams": {
         "command_timeout": "60", 
         "script_type": "PYTHON", 
-        "base_urls": "[{\"id\": \"HDP-2.2.0.0-885\", \"type\": \"HDP\",\"baseurl\": \"http://host1/hdp\"}, {\"id\": \"HDP-UTILS-1.0.0.20\", \"type\": \"HDP-UTILS\", \"baseurl\": \"http://host1/hdp-utils\"}]", 
-        "package_list": "[\"python-rrdtool-1.4.5\", \"libganglia-3.5.0-99\", \"ganglia-*\"]", 
+        "base_urls": "[{\"name\":\"HDP-UTILS\",\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-UTILS-1.1.0.20\"},{\"name\":\"HDP\",\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-2.2\"}]",
+        "package_list": "[{\"name\":\"hadoop_2_2_*\"},{\"name\":\"snappy\"},{\"name\":\"snappy-devel\"},{\"name\":\"lzo\"},{\"name\":\"hadooplzo_2_2_*\"},{\"name\":\"hadoop_2_2_*-libhdfs\"},{\"name\":\"ambari-log4j\"}]",
         "script": "install_packages.py"
     }, 
     "commandId": "14-1",