Browse files

AMBARI-7296. HCatalog and WebHCat services should not be managed as separate service (should be part of Hive service) (jaimin)

Jaimin Jetly 11 years ago
parent
commit
601014ed8b
100 changed files with 1048 additions and 2525 deletions
  1. 3 3
      ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
  2. 2 2
      ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
  3. 0 7
      ambari-server/docs/api/v1/services.md
  4. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
  5. 0 1
      ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
  6. 3 5
      ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
  7. 130 60
      ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
  8. 0 2
      ambari-server/src/main/resources/custom_actions/validate_configs.py
  9. 1 3
      ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
  10. 1 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
  11. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
  12. 82 72
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
  13. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
  14. 28 3
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
  15. 2 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
  16. 3 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
  17. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
  18. 1 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
  19. 2 2
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
  20. 19 3
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
  21. 1 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
  22. 0 6
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
  23. 1 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
  24. 0 103
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
  25. 0 20
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
  26. 0 78
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
  27. 0 44
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py
  28. 1 3
      ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json
  29. 0 5
      ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml
  30. 1 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
  31. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-env.xml
  32. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-site.xml
  33. 91 74
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
  34. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
  35. 42 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  36. 2 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py
  37. 1 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py
  38. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
  39. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
  40. 2 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
  41. 18 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service_check.py
  42. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
  43. 1 7
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
  44. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
  45. 0 110
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
  46. 0 20
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/__init__.py
  47. 0 102
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
  48. 0 45
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py
  49. 1 3
      ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
  50. 58 42
      ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
  51. 0 143
      ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
  52. 0 46
      ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
  53. 1 2
      ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
  54. 47 15
      ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
  55. 0 143
      ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
  56. 0 47
      ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
  57. 0 4
      ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
  58. 0 26
      ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
  59. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
  60. 12 17
      ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
  61. 10 19
      ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
  62. 26 97
      ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
  63. 13 14
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
  64. 168 73
      ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
  65. 20 0
      ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
  66. 6 6
      ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
  67. 0 61
      ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_service_check.py
  68. 21 0
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
  69. 53 56
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
  70. 0 61
      ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_service_check.py
  71. 74 0
      ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml
  72. 0 126
      ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/configuration/webhcat-site.xml
  73. 0 102
      ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/metainfo.xml
  74. 0 28
      ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
  75. 0 0
      ambari-web/app/assets/data/alerts/alerts.json
  76. 0 117
      ambari-web/app/assets/data/dashboard/services.json
  77. 1 10
      ambari-web/app/assets/data/hosts/HDP2/hosts.json
  78. 0 9
      ambari-web/app/assets/data/services/HDP2/components_state.json
  79. 0 16
      ambari-web/app/assets/data/services/HDP2/services.json
  80. 1 117
      ambari-web/app/assets/data/services/host_component_actual_configs.json
  81. 1 1
      ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json
  82. 1 1
      ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json
  83. 83 135
      ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
  84. 0 18
      ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json
  85. 0 11
      ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json
  86. 0 11
      ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json
  87. 0 11
      ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json
  88. 0 11
      ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json
  89. 0 11
      ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json
  90. 0 17
      ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json
  91. 0 20
      ambari-web/app/assets/data/wizard/stack/hdp/version01/HCATALOG.json
  92. 0 4
      ambari-web/app/assets/data/wizard/stack/hdp/version1.2.1/HCATALOG.json
  93. 0 4
      ambari-web/app/assets/data/wizard/stack/hdp/version1.3.0/HCATALOG.json
  94. 0 4
      ambari-web/app/assets/data/wizard/stack/hdp/version131/HCATALOG.json
  95. 0 4
      ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/HCATALOG.json
  96. 0 55
      ambari-web/app/assets/data/wizard/stack/stacks.json
  97. 2 2
      ambari-web/app/controllers/main/admin/security/add/step2.js
  98. 1 1
      ambari-web/app/controllers/main/admin/serviceAccounts_controller.js
  99. 4 4
      ambari-web/app/controllers/main/service/info/configs.js
  100. 1 1
      ambari-web/app/controllers/main/service/item.js

+ 3 - 3
ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py

@@ -34,8 +34,8 @@ class TestActualConfigHandler(TestCase):
   def setUp(self):
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]
@@ -108,7 +108,7 @@ class TestActualConfigHandler(TestCase):
        "componentName" : "HIVE_METASTORE"},
       {"serviceName" : "HIVE",
        "componentName" : "MYSQL_SERVER"},
-      {"serviceName" : "WEBHCAT",
+      {"serviceName" : "HIVE",
        "componentName" : "WEBHCAT_SERVER"},
       {"serviceName" : "YARN",
        "componentName" : "RESOURCEMANAGER"},

+ 2 - 2
ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py

@@ -36,8 +36,8 @@ class TestLiveStatus(TestCase):
     sys.stdout = out
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]

+ 0 - 7
ambari-server/docs/api/v1/services.md

@@ -77,13 +77,6 @@ Get the collection of the services for the cluster named "c1".
           			"service_name" : "NAGIOS"
           		}
         	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HCATALOG",
-        		"ServiceInfo" : {
-        	  		"cluster_name" : "c1",
-        	  		"service_name" : "HCATALOG"
-        	  	}
-        	},
         	{
         		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/PIG",
         		"ServiceInfo" : {

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java

@@ -851,7 +851,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
       Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("NAGIOS_SERVER");
       for (DependencyInfo dependency : nagiosDependencies) {
         if (dependency.getComponentName().equals("HCAT")) {
-          dependencyConditionalServiceMap.put(dependency, "HCATALOG");
+          dependencyConditionalServiceMap.put(dependency, "HIVE");
         } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
           dependencyConditionalServiceMap.put(dependency, "OOZIE");
         } else if (dependency.getComponentName().equals("YARN_CLIENT")) {

+ 0 - 1
ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java

@@ -42,7 +42,6 @@ public class ActionMetadata {
       Map<String, String> serviceChecks = new HashMap<String, String>();
       
       serviceChecks.put(Service.Type.ZOOKEEPER.toString(), "ZOOKEEPER_QUORUM_SERVICE_CHECK");
-      serviceChecks.put(Service.Type.HCATALOG.toString(), "HCAT_SERVICE_CHECK");
       
       SERVICE_CHECKS = Collections.unmodifiableMap(serviceChecks);
   }

+ 3 - 5
ambari-server/src/main/java/org/apache/ambari/server/state/Service.java

@@ -18,14 +18,13 @@
 
 package org.apache.ambari.server.state;
 
-import java.util.Map;
-import java.util.concurrent.locks.ReadWriteLock;
-
 import com.google.inject.persist.Transactional;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.ServiceResponse;
 
+import java.util.Map;
+import java.util.concurrent.locks.ReadWriteLock;
+
 public interface Service {
 
   public String getName();
@@ -113,7 +112,6 @@ public interface Service {
     GANGLIA,
     ZOOKEEPER,
     PIG,
-    HCATALOG,
     FLUME,
     YARN,
     MAPREDUCE2

+ 130 - 60
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java

@@ -18,74 +18,30 @@
 
 package org.apache.ambari.server.upgrade;
 
-import java.lang.reflect.Type;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.Date;
-
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Expression;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
-
 import com.google.common.reflect.TypeToken;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.DaoUtils;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
-import org.apache.ambari.server.orm.dao.KeyValueDAO;
-import org.apache.ambari.server.orm.dao.PermissionDAO;
-import org.apache.ambari.server.orm.dao.PrincipalDAO;
-import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
-import org.apache.ambari.server.orm.dao.PrivilegeDAO;
-import org.apache.ambari.server.orm.dao.ResourceDAO;
-import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
-import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
-import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.dao.ViewDAO;
-import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
-import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
-import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
-import org.apache.ambari.server.orm.entities.KeyValueEntity;
-import org.apache.ambari.server.orm.entities.PermissionEntity;
-import org.apache.ambari.server.orm.entities.PrincipalEntity;
-import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
-import org.apache.ambari.server.orm.entities.PrivilegeEntity;
-import org.apache.ambari.server.orm.entities.ResourceEntity;
-import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
-import org.apache.ambari.server.orm.entities.ViewEntity;
-import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.*;
+import java.lang.reflect.Type;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.Map.Entry;
 
 /**
  * Upgrade catalog for version 1.7.0.
@@ -532,6 +488,9 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     // Update historic records with the log paths, but only enough so as to not prolong the upgrade process
+    moveHcatalogIntoHiveService();
+    moveWebHcatIntoHiveService();
+
     executeInTransaction(new Runnable() {
       @Override
       public void run() {
@@ -597,6 +556,117 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     moveConfigGroupsGlobalToEnv();
   }
 
+  public void moveHcatalogIntoHiveService() throws AmbariException {
+    final String serviceName = "HIVE";
+    final String serviceNameToBeDeleted = "HCATALOG";
+    final String componentName = "HCAT";
+    moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+  }
+
+  private void moveWebHcatIntoHiveService() throws AmbariException {
+    final String serviceName = "HIVE";
+    final String serviceNameToBeDeleted = "WEBHCAT";
+    final String componentName = "WEBHCAT_SERVER";
+    moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+  }
+
+  private void moveComponentsIntoService(String serviceName, String serviceNameToBeDeleted, String componentName) throws AmbariException {
+    /**
+     * 1. ADD servicecomponentdesiredstate: Add HCAT HIVE entry:
+     * 2. Update hostcomponentdesiredstate: service_name to HIVE where service_name is HCATALOG:
+     * 3. Update hostcomponentstate: service_name to HIVE where service_name is HCATALOG:
+     * 4. DELETE servicecomponentdesiredstate: where component_name is HCAT and service_name is HCATALOG :
+     * 5. Delete servicedesiredstate where  service_name is HCATALOG:
+     * 6. Delete clusterservices where service_name is  HCATALOG:
+     */
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ServiceDesiredStateDAO serviceDesiredStateDAO = injector.getInstance(ServiceDesiredStateDAO.class);
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+    HostComponentStateDAO hostComponentStateDAO = injector.getInstance(HostComponentStateDAO.class);
+
+    List<ClusterEntity> clusterEntities = clusterDAO.findAll();
+    for (final ClusterEntity clusterEntity : clusterEntities) {
+      ServiceComponentDesiredStateEntityPK pkHCATInHcatalog = new ServiceComponentDesiredStateEntityPK();
+      pkHCATInHcatalog.setComponentName(componentName);
+      pkHCATInHcatalog.setClusterId(clusterEntity.getClusterId());
+      pkHCATInHcatalog.setServiceName(serviceNameToBeDeleted);
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityToDelete = serviceComponentDesiredStateDAO.findByPK(pkHCATInHcatalog);
+
+      if (serviceComponentDesiredStateEntityToDelete == null)
+        continue;
+
+      ServiceDesiredStateEntityPK serviceDesiredStateEntityPK = new ServiceDesiredStateEntityPK();
+      serviceDesiredStateEntityPK.setClusterId(clusterEntity.getClusterId());
+      serviceDesiredStateEntityPK.setServiceName(serviceNameToBeDeleted);
+      ServiceDesiredStateEntity serviceDesiredStateEntity = serviceDesiredStateDAO.findByPK(serviceDesiredStateEntityPK);
+
+      ClusterServiceEntityPK clusterServiceEntityToBeDeletedPK = new ClusterServiceEntityPK();
+      clusterServiceEntityToBeDeletedPK.setClusterId(clusterEntity.getClusterId());
+      clusterServiceEntityToBeDeletedPK.setServiceName(serviceNameToBeDeleted);
+      ClusterServiceEntity clusterServiceEntityToBeDeleted = clusterServiceDAO.findByPK(clusterServiceEntityToBeDeletedPK);
+
+      ClusterServiceEntityPK clusterServiceEntityPK = new ClusterServiceEntityPK();
+      clusterServiceEntityPK.setClusterId(clusterEntity.getClusterId());
+      clusterServiceEntityPK.setServiceName(serviceName);
+
+
+      ClusterServiceEntity clusterServiceEntity = clusterServiceDAO.findByPK(clusterServiceEntityPK);
+
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+      serviceComponentDesiredStateEntity.setServiceName(serviceName);
+      serviceComponentDesiredStateEntity.setComponentName(serviceComponentDesiredStateEntityToDelete.getComponentName());
+      serviceComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+      serviceComponentDesiredStateEntity.setDesiredStackVersion(serviceComponentDesiredStateEntityToDelete.getDesiredStackVersion());
+      serviceComponentDesiredStateEntity.setDesiredState(serviceComponentDesiredStateEntityToDelete.getDesiredState());
+      serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      //serviceComponentDesiredStateDAO.create(serviceComponentDesiredStateEntity);
+
+      Iterator<HostComponentDesiredStateEntity> hostComponentDesiredStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentDesiredStateEntities().iterator();
+      Iterator<HostComponentStateEntity> hostComponentStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentStateEntities().iterator();
+
+      while (hostComponentDesiredStateIterator.hasNext()) {
+        HostComponentDesiredStateEntity hcDesiredStateEntityToBeDeleted = hostComponentDesiredStateIterator.next();
+        HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+        hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+        hostComponentDesiredStateEntity.setComponentName(hcDesiredStateEntityToBeDeleted.getComponentName());
+        hostComponentDesiredStateEntity.setDesiredStackVersion(hcDesiredStateEntityToBeDeleted.getDesiredStackVersion());
+        hostComponentDesiredStateEntity.setDesiredState(hcDesiredStateEntityToBeDeleted.getDesiredState());
+        hostComponentDesiredStateEntity.setHostName(hcDesiredStateEntityToBeDeleted.getHostName());
+        hostComponentDesiredStateEntity.setHostEntity(hcDesiredStateEntityToBeDeleted.getHostEntity());
+        hostComponentDesiredStateEntity.setAdminState(hcDesiredStateEntityToBeDeleted.getAdminState());
+        hostComponentDesiredStateEntity.setMaintenanceState(hcDesiredStateEntityToBeDeleted.getMaintenanceState());
+        hostComponentDesiredStateEntity.setRestartRequired(hcDesiredStateEntityToBeDeleted.isRestartRequired());
+        hostComponentDesiredStateEntity.setServiceName(serviceName);
+        hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+        hostComponentDesiredStateDAO.merge(hostComponentDesiredStateEntity);
+        hostComponentDesiredStateDAO.remove(hcDesiredStateEntityToBeDeleted);
+      }
+
+      while (hostComponentStateIterator.hasNext()) {
+        HostComponentStateEntity hcStateToBeDeleted = hostComponentStateIterator.next();
+        HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+        hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+        hostComponentStateEntity.setComponentName(hcStateToBeDeleted.getComponentName());
+        hostComponentStateEntity.setCurrentStackVersion(hcStateToBeDeleted.getCurrentStackVersion());
+        hostComponentStateEntity.setCurrentState(hcStateToBeDeleted.getCurrentState());
+        hostComponentStateEntity.setHostName(hcStateToBeDeleted.getHostName());
+        hostComponentStateEntity.setHostEntity(hcStateToBeDeleted.getHostEntity());
+        hostComponentStateEntity.setServiceName(serviceName);
+        hostComponentStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+        hostComponentStateDAO.merge(hostComponentStateEntity);
+        hostComponentStateDAO.remove(hcStateToBeDeleted);
+      }
+      serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      serviceComponentDesiredStateDAO.merge(serviceComponentDesiredStateEntity);
+      serviceComponentDesiredStateDAO.remove(serviceComponentDesiredStateEntityToDelete);
+      serviceDesiredStateDAO.remove(serviceDesiredStateEntity);
+      clusterServiceDAO.remove(clusterServiceEntityToBeDeleted);
+    }
+  }
+
+
   private void moveConfigGroupsGlobalToEnv() throws AmbariException {
     final ConfigGroupConfigMappingDAO confGroupConfMappingDAO = injector.getInstance(ConfigGroupConfigMappingDAO.class);
     ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
@@ -905,7 +975,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (final Cluster cluster : clusterMap.values()) {
         Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
-                CONTENT_FIELD_NAME, cluster.getClusterName());
+            CONTENT_FIELD_NAME, cluster.getClusterName());
 
         for(String configType:configTypes) {
           if(!configType.endsWith(ENV_CONFIGS_POSTFIX)) {
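
The six numbered steps in the moveComponentsIntoService() comment are easier to follow as a toy simulation. The sketch below is hypothetical and self-contained: table and column names mirror the comment, the rows are illustrative, and the real upgrade performs the same moves through the JPA DAOs shown in the diff rather than raw row lists.

# Hypothetical, runnable simulation of the six steps in the comment above.
# Tables are lists of tuples; the real code walks JPA entities instead.

def move_components_into_service(db, service, service_to_delete, component):
    # Steps 1 and 4: re-home the servicecomponentdesiredstate row
    # (add the HIVE-owned copy, drop the HCATALOG-owned original).
    db["servicecomponentdesiredstate"] = [
        (c, service, comp) if (s, comp) == (service_to_delete, component) else (c, s, comp)
        for (c, s, comp) in db["servicecomponentdesiredstate"]]
    # Steps 2 and 3: point host-level desired state and live state at the new service.
    for t in ("hostcomponentdesiredstate", "hostcomponentstate"):
        db[t] = [(c, service, comp, h) if s == service_to_delete else (c, s, comp, h)
                 for (c, s, comp, h) in db[t]]
    # Steps 5 and 6: drop the emptied service from servicedesiredstate
    # and clusterservices.
    for t in ("servicedesiredstate", "clusterservices"):
        db[t] = [(c, s) for (c, s) in db[t] if s != service_to_delete]

db = {
    "clusterservices":              [("c1", "HIVE"), ("c1", "HCATALOG")],
    "servicedesiredstate":          [("c1", "HIVE"), ("c1", "HCATALOG")],
    "servicecomponentdesiredstate": [("c1", "HIVE", "HIVE_SERVER"),
                                     ("c1", "HCATALOG", "HCAT")],
    "hostcomponentdesiredstate":    [("c1", "HCATALOG", "HCAT", "h1")],
    "hostcomponentstate":           [("c1", "HCATALOG", "HCAT", "h1")],
}
move_components_into_service(db, "HIVE", "HCATALOG", "HCAT")
assert db["servicecomponentdesiredstate"] == [("c1", "HIVE", "HIVE_SERVER"),
                                              ("c1", "HIVE", "HCAT")]
assert db["clusterservices"] == [("c1", "HIVE")]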

+ 0 - 2
ambari-server/src/main/resources/custom_actions/validate_configs.py

@@ -131,7 +131,6 @@ PROPERTIES_TO_CHECK = {
   "HIVE_CLIENT": {
     "hive-env": ["hive_log_dir", "hive_pid_dir"]
   },
-  #HCATALOG
   "HCAT": {
     "hive-env": ["hcat_log_dir", "hcat_pid_dir"]
   },
@@ -297,7 +296,6 @@ USERS_TO_GROUP_MAPPING = {
       "hive_user": "hive_user"
     }
   },
-  #HCATALOG
   "HCAT": {
     "hive-env": {
       "hive_user": "hive_user"

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json

@@ -20,10 +20,8 @@
         "WEBHCAT_SERVER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml

@@ -27,7 +27,7 @@
     <description>webhcat-env.sh content</description>
     <value>
 # The file containing the running pid
-PID_FILE={{pid_file}}
+PID_FILE={{webhcat_pid_file}}
 
 TEMPLETON_LOG_DIR={{templeton_log_dir}}/
 

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml


+ 82 - 72
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml

@@ -80,6 +80,49 @@
           </commandScript>
         </component>
 
+        <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
         <component>
           <name>HIVE_CLIENT</name>
           <displayName>Hive Client</displayName>
@@ -112,68 +155,6 @@
             </configFile>            
           </configFiles>
         </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive</name>
-            </package>
-            <package>
-              <name>mysql-connector-java</name>
-            </package>
-            <package>
-              <name>mysql</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>redhat5,redhat6</osFamily>
-          <packages>
-            <package>
-              <name>mysql-server</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>suse11</osFamily>
-          <packages>
-            <package>
-              <name>mysql-client</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>MAPREDUCE</service>
-        <service>ZOOKEEPER</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-env</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <displayName>HCatalog</displayName>
-      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
-        to more easily read and write data on the grid.
-      </comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
         <component>
           <name>HCAT</name>
           <displayName>HCat</displayName>
@@ -206,16 +187,49 @@
           </configFiles>
         </component>
       </components>
+
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
+            <package>
+              <name>hive</name>
+            </package>
             <package>
               <name>hcatalog</name>
             </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
+
       <commandScript>
         <script>scripts/service_check.py</script>
         <scriptType>PYTHON</scriptType>
@@ -223,22 +237,18 @@
       </commandScript>
       
       <requiredServices>
-        <service>HIVE</service>
+        <service>MAPREDUCE</service>
+        <service>ZOOKEEPER</service>
       </requiredServices>
 
       <configuration-dependencies>
         <config-type>hive-site</config-type>
         <config-type>hive-env</config-type>
-      </configuration-dependencies>
-
-      <excluded-config-types>
-        <config-type>hive-env</config-type>
-        <config-type>hive-site</config-type>
-        <config-type>hive-exec-log4j</config-type>
         <config-type>hive-log4j</config-type>
-      </excluded-config-types>
-
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>webhcat-site</config-type>
+        <config-type>webhcat-env</config-type>
+      </configuration-dependencies>
     </service>
-
   </services>
 </metainfo>

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh


+ 28 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py

@@ -64,7 +64,6 @@ smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env
@@ -155,11 +154,37 @@ hive_hdfs_user_dir = format("/user/{hive_user}")
 hive_hdfs_user_mode = 0700
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+#################################################
+################## WebHCat ######################
+#################################################
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+
+config_dir = '/etc/hcatalog/conf'
+
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.templeton_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+#hdfs directories
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+security_param = "true" if security_enabled else "false"
+
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
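
The trailing comment above refers to a pattern used throughout these params modules: functools.partial binds the arguments shared by every HdfsDirectory call once, so call sites pass only what varies. A minimal illustration with stand-in values; the real HdfsDirectory resource comes from resource_management:

import functools

def hdfs_directory(path, mode=0o755, conf_dir=None, hdfs_user=None,
                   security_enabled=False, keytab=None, kinit_path_local=None):
    # Stand-in for resource_management's HdfsDirectory resource.
    print("create %s (mode %o) as %s via %s" % (path, mode, hdfs_user, conf_dir))

# Bind the common arguments once, exactly as params.py does.
HdfsDirectory = functools.partial(hdfs_directory,
                                  conf_dir="/etc/hadoop/conf",
                                  hdfs_user="hdfs",
                                  security_enabled=False,
                                  keytab="/etc/security/keytabs/hdfs.headless.keytab",
                                  kinit_path_local="/usr/bin/kinit")

HdfsDirectory("/apps/webhcat")            # only the varying arguments remain
HdfsDirectory("/user/hcat", mode=0o755)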

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py

@@ -23,6 +23,7 @@ import socket
 import sys
 
 from hcat_service_check import hcat_service_check
+from webhcat_service_check import webhcat_service_check
 
 class HiveServiceCheck(Script):
   def service_check(self, env):
@@ -42,6 +43,7 @@ class HiveServiceCheck(Script):
       sys.exit(1)
 
     hcat_service_check()
+    webhcat_service_check()
 
 if __name__ == "__main__":
   HiveServiceCheck().execute()

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py

@@ -29,6 +29,9 @@ hive_metastore_pid = 'hive.pid'
 
 hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
 
+templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
+webhcat_pid_file = format('{templeton_pid_dir}/webhcat.pid')
+
 if System.get_instance().os_family == "suse":
   daemon_name = 'mysql'
 else:
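
These two values feed the pid_file to webhcat_pid_file rename visible in webhcat-env.xml, webhcat_server.py and webhcat_service.py, presumably because the generic name pid_file becomes ambiguous once the WebHCat scripts share the HIVE package. A rough, self-contained illustration (paths are examples; resource_management's format() resolves {names} from the surrounding params much like str.format here):

hcat_pid_dir = "/var/run/webhcat"          # from hive-env: hcat_pid_dir
templeton_pid_dir = hcat_pid_dir
webhcat_pid_file = "{0}/webhcat.pid".format(templeton_pid_dir)

# The webhcat-env.sh template then renders PID_FILE={{webhcat_pid_file}}:
assert webhcat_pid_file == "/var/run/webhcat/webhcat.pid"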

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py


+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py

@@ -47,7 +47,7 @@ class WebHCatServer(Script):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    check_process_status(status_params.pid_file)
+    check_process_status(status_params.webhcat_pid_file)
 
 if __name__ == "__main__":
   WebHCatServer().execute()

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_service.py → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py

@@ -27,7 +27,7 @@ def webhcat_service(action='start'):
 
   if action == 'start':
     demon_cmd = format('{cmd} start')
-    no_op_test = format('ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1')
+    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps `cat {webhcat_pid_file}` >/dev/null 2>&1')
     Execute(demon_cmd,
             user=params.webhcat_user,
             not_if=no_op_test
@@ -37,4 +37,4 @@ def webhcat_service(action='start'):
     Execute(demon_cmd,
             user=params.webhcat_user
     )
-    Execute(format('rm -f {pid_file}'))
+    Execute(format('rm -f {webhcat_pid_file}'))

+ 19 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/status_params.py → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py

@@ -20,7 +20,23 @@ limitations under the License.
 
 from resource_management import *
 
-config = Script.get_config()
+def webhcat_service_check():
+  import params
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)
+
+
+
 
-templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
-pid_file = format('{templeton_pid_dir}/webhcat.pid')

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml

@@ -52,7 +52,7 @@
               </auto-deploy>
             </dependency>
             <dependency>
-              <name>HCATALOG/HCAT</name>
+              <name>HIVE/HCAT</name>
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>

+ 0 - 6
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2

@@ -69,12 +69,6 @@ define servicegroup {
   alias  OOZIE Checks
 }
 {% endif %}
-{% if hostgroup_defs['webhcat-server'] %}
-define servicegroup {
-  servicegroup_name  WEBHCAT
-  alias  WEBHCAT Checks
-}
-{% endif %}
 {% if hostgroup_defs['nagios-server'] %}
 define servicegroup {
   servicegroup_name  NAGIOS

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2

@@ -566,7 +566,7 @@ define service {
         hostgroup_name          webhcat-server
         use                     hadoop-service
         service_description     WEBHCAT::WebHCat Server status
-        servicegroups           WEBHCAT 
+        servicegroups           HIVE
         {% if security_enabled %}
         check_command           check_templeton_status!{{ templeton_port }}!v1!{{ str(security_enabled).lower() }}!{{ nagios_keytab_path }}!{{ nagios_principal_name }}!{{ kinit_path_local }}
         {% else %}

+ 0 - 103
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml

@@ -1,103 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <displayName>WebHCat Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>ZOOKEEPER</service>
-        <service>HIVE</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

+ 0 - 20
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py

@@ -1,20 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""

+ 0 - 78
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py

@@ -1,78 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-import status_params
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-
-config_dir = '/etc/hcatalog/conf'
-
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.templeton_pid_dir
-
-pid_file = status_params.pid_file
-
-hadoop_conf_dir = config['configurations']['webhcat-site']['templeton.hadoop.conf.dir']
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-hadoop_home = '/usr'
-user_group = config['configurations']['cluster-env']['user_group']
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-
-#hdfs directories
-webhcat_apps_dir = "/apps/webhcat"
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-#for create_hdfs_directory
-hostname = config["hostname"]
-security_param = "true" if security_enabled else "false"
-hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
-)

+ 0 - 44
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py

@@ -1,44 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-class WebHCatServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    File(format("{tmp_dir}/templetonSmoke.sh"),
-         content= StaticFile('templetonSmoke.sh'),
-         mode=0755
-    )
-
-    cmd = format("sh {tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-                 " {security_param} {kinit_path_local}",
-                 smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-            logoutput=True)
-
-if __name__ == "__main__":
-  WebHCatServiceCheck().execute()

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json

@@ -20,10 +20,8 @@
         "WEBHCAT_SERVER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START","WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

+ 0 - 5
ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml

@@ -23,10 +23,5 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.11.0.1.3</version>
     </service>
-    <service>
-      <name>HCATALOG</name>
-      <comment>HCATALOG</comment>
-      <version>0.11.0.1.3</version>
-    </service>
   </services>
 </metainfo>

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json

@@ -21,10 +21,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START", "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-env.xml

@@ -27,7 +27,7 @@
     <description>webhcat-env.sh content</description>
     <value>
 # The file containing the running pid
-PID_FILE={{pid_file}}
+PID_FILE={{webhcat_pid_file}}
 
 TEMPLETON_LOG_DIR={{templeton_log_dir}}/
 

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-site.xml → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-site.xml


+ 91 - 74
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml

@@ -75,7 +75,55 @@
             <scriptType>PYTHON</scriptType>
           </commandScript>
         </component>
-
+        <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
         <component>
           <name>MYSQL_SERVER</name>
           <displayName>MySQL Server</displayName>
@@ -119,6 +167,37 @@
             </configFile>                         
           </configFiles>
         </component>
+        <component>
+          <name>HCAT</name>
+          <displayName>HCat</displayName>
+          <category>CLIENT</category>
+          <commandScript>
+            <script>scripts/hcat_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>xml</type>
+              <fileName>hive-site.xml</fileName>
+              <dictionaryName>hive-site</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-env.sh</fileName>
+              <dictionaryName>hive-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-log4j.properties</fileName>
+              <dictionaryName>hive-log4j</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-exec-log4j.properties</fileName>
+              <dictionaryName>hive-exec-log4j</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
       </components>
 
       <osSpecifics>
@@ -128,6 +207,15 @@
             <package>
               <name>hive</name>
             </package>
+            <package>
+              <name>hcatalog</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
             <package>
               <name>mysql-connector-java</name>
             </package>
@@ -176,80 +264,9 @@
         <config-type>hive-log4j</config-type>
         <config-type>hive-exec-log4j</config-type>
         <config-type>hive-env</config-type>
+        <config-type>webhcat-site</config-type>
+        <config-type>webhcat-env</config-type>
       </configuration-dependencies>
     </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <displayName>HCatalog</displayName>
-      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
-        to more easily read and write data on the grid.
-      </comment>
-      <version>0.12.0.2.0.6.0</version>
-      <components>
-        <component>
-          <name>HCAT</name>
-          <displayName>HCat</displayName>
-          <category>CLIENT</category>
-          <commandScript>
-            <script>scripts/hcat_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>hive-site.xml</fileName>
-              <dictionaryName>hive-site</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-env.sh</fileName>
-              <dictionaryName>hive-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-log4j.properties</fileName>
-              <dictionaryName>hive-log4j</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-exec-log4j.properties</fileName>
-              <dictionaryName>hive-exec-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HIVE</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-env</config-type>
-      </configuration-dependencies>
-      <excluded-config-types>
-        <config-type>hive-env</config-type>
-        <config-type>hive-site</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>hive-log4j</config-type>
-      </excluded-config-types>
-    </service>
-
   </services>
 </metainfo>

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh


+ 42 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -36,6 +36,7 @@ if rpm_version is not None:
   hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
   hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
   hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
   hive_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
   hive_client_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
   hive_server_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf.server')
@@ -46,16 +47,21 @@ if rpm_version is not None:
 
   if str(hdp_stack_version).startswith('2.0'):
     hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
     hcat_lib = format('/usr/hdp/{rpm_version}/hive/hcatalog/share/hcatalog')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
   # for newer versions
   else:
     hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive-hcatalog/conf')
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
     hcat_lib = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/share/hcatalog')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
 
 else:
   hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
+  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
   hive_conf_dir = "/etc/hive/conf"
   hive_bin = '/usr/lib/hive/bin'
   hive_lib = '/usr/lib/hive/lib/'
@@ -66,11 +72,15 @@ else:
 
   if str(hdp_stack_version).startswith('2.0'):
     hcat_conf_dir = '/etc/hcatalog/conf'
+    config_dir = '/etc/hcatalog/conf'
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
   else:
     hcat_conf_dir = '/etc/hive-hcatalog/conf'
+    config_dir = '/etc/hive-webhcat/conf'
     hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
@@ -202,9 +212,7 @@ hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.wareho
 #for create_hdfs_directory
 hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -228,6 +236,38 @@ if os.path.exists(mysql_jdbc_driver_jar):
 else:  
   hive_exclude_packages = []
 
+########################################################
+########### WebHCat related params #####################
+########################################################
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0'):
+  config_dir = '/etc/hcatalog/conf'
+  webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+# for newer versions
+else:
+  config_dir = '/etc/hive-webhcat/conf'
+  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.hcat_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+security_param = "true" if security_enabled else "false"
+
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
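
Note on the WebHCat block grafted into params.py above: like the hcat branches earlier in the file, it picks config and sbin paths by stack version. A condensed, runnable restatement of that branching (paths copied from the diff; the helper function itself is hypothetical):

def webhcat_dirs(stack_version):
    if str(stack_version).startswith('2.0'):
        return '/etc/hcatalog/conf', '/usr/lib/hcatalog/sbin'
    # for newer versions
    return '/etc/hive-webhcat/conf', '/usr/lib/hive-hcatalog/sbin'

assert webhcat_dirs('2.0.6') == ('/etc/hcatalog/conf', '/usr/lib/hcatalog/sbin')
assert webhcat_dirs('2.1') == ('/etc/hive-webhcat/conf', '/usr/lib/hive-hcatalog/sbin')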

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py

@@ -23,6 +23,7 @@ import socket
 import sys
 
 from hcat_service_check import hcat_service_check
+from webhcat_service_check import webhcat_service_check
 
 class HiveServiceCheck(Script):
   def service_check(self, env):
@@ -42,6 +43,7 @@ class HiveServiceCheck(Script):
       sys.exit(1)
 
     hcat_service_check()
+    webhcat_service_check()
 
 if __name__ == "__main__":
   HiveServiceCheck().execute()
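
Note on the hunk above: the Hive service check becomes the single entry point, chaining the HCat and WebHCat smoke tests after its own port probes. A stub-level sketch of the resulting flow (the function bodies are stand-ins for the real helpers):

def hcat_service_check():
    print("run HCat smoke test")

def webhcat_service_check():
    print("run WebHCat (Templeton) smoke test")

def service_check():
    # in the real script, HiveServer2 and metastore port checks run first
    hcat_service_check()
    webhcat_service_check()

service_check()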

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py

@@ -28,6 +28,7 @@ hive_pid = 'hive-server.pid'
 hive_metastore_pid = 'hive.pid'
 
 hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
+webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
 
 if System.get_instance().os_family == "suse" or System.get_instance().os_family == "ubuntu":
   daemon_name = 'mysql'

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py


+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py

@@ -47,7 +47,7 @@ class WebHCatServer(Script):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    check_process_status(status_params.pid_file)
+    check_process_status(status_params.webhcat_pid_file)
 
 if __name__ == "__main__":
   WebHCatServer().execute()

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py

@@ -27,7 +27,7 @@ def webhcat_service(action='start'):
 
   if action == 'start':
     demon_cmd = format('{cmd} start')
-    no_op_test = format('ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1')
+    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps `cat {webhcat_pid_file}` >/dev/null 2>&1')
     Execute(demon_cmd,
             user=params.webhcat_user,
             not_if=no_op_test
@@ -37,4 +37,4 @@ def webhcat_service(action='start'):
     Execute(demon_cmd,
             user=params.webhcat_user
     )
-    Execute(format('rm -f {pid_file}'))
+    Execute(format('rm -f {webhcat_pid_file}'))
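
Note on the guard above: start stays idempotent because the not_if shell test skips the daemon command when the pid file already names a live process. A plain-Python equivalent of that liveness probe (illustrative only; the real check is the one-line shell test in the diff):

import os

def webhcat_running(pid_file):
    try:
        with open(pid_file) as f:
            pid = int(f.read().strip())
        os.kill(pid, 0)  # signal 0 probes for existence without touching the process
        return True
    except (IOError, ValueError, OSError):
        return False

if not webhcat_running('/var/run/webhcat/webhcat.pid'):
    print("would invoke the start command here")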

+ 18 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/status_params.py → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service_check.py

@@ -20,7 +20,22 @@ limitations under the License.
 
 from resource_management import *
 
-config = Script.get_config()
+def webhcat_service_check():
+  import params
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)
+
+
 
-templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
-pid_file = format('{templeton_pid_dir}/webhcat.pid')
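
Note on the rewrite above: the old status_params module body gives way to webhcat_service_check(), which stages templetonSmoke.sh into the agent tmp dir and retries it up to three times. How the smoke command line expands, as a hedged example (every value below is illustrative):

tmp_dir = '/var/lib/ambari-agent/tmp'
webhcat_server_host = ['host1.example.com']
smokeuser = 'ambari-qa'
security_enabled = False
smokeuser_keytab = '/etc/security/keytabs/smokeuser.headless.keytab' if security_enabled else 'no_keytab'
security_param = "true" if security_enabled else "false"
kinit_path_local = '/usr/bin/kinit'

cmd = "{0}/templetonSmoke.sh {1} {2} {3} {4} {5}".format(
    tmp_dir, webhcat_server_host[0], smokeuser, smokeuser_keytab,
    security_param, kinit_path_local)
print(cmd)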

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml

@@ -59,7 +59,7 @@
               </auto-deploy>
             </dependency>
             <dependency>
-              <name>HCATALOG/HCAT</name>
+              <name>HIVE/HCAT</name>
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>

+ 1 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2

@@ -54,12 +54,6 @@ define servicegroup {
   alias  OOZIE Checks
 }
 {% endif %}
-{% if hostgroup_defs['webhcat-server'] %}
-define servicegroup {
-  servicegroup_name  WEBHCAT
-  alias  WEBHCAT Checks
-}
-{% endif %}
 {% if hostgroup_defs['nagios-server'] %}
 define servicegroup {
   servicegroup_name  NAGIOS
@@ -72,7 +66,7 @@ define servicegroup {
   alias  GANGLIA Checks
 }
 {% endif %}
-{% if hostgroup_defs['hiveserver'] %}
+{% if hostgroup_defs['hiveserver'] or hostgroup_defs['webhcat-server'] %}
 define servicegroup {
   servicegroup_name  HIVE
   alias  HIVE Checks

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2

@@ -729,7 +729,7 @@ define service {
         hostgroup_name          webhcat-server
         use                     hadoop-service
         service_description     WEBHCAT::WebHCat Server status
-        servicegroups           WEBHCAT 
+        servicegroups           HIVE
         {% if security_enabled %}
         check_command           check_templeton_status!{{ templeton_port }}!v1!{{ str(security_enabled).lower() }}!{{ nagios_keytab_path }}!{{ nagios_principal_name }}!{{ kinit_path_local }}
         {% else %}
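
Note on the Nagios changes above: the standalone WEBHCAT servicegroup is gone, the WebHCat Server check now files under HIVE, and the HIVE group is emitted when either a hiveserver or a webhcat-server hostgroup exists. The widened Jinja guard, restated in plain Python (hostgroup_defs contents are illustrative):

hostgroup_defs = {'hiveserver': [], 'webhcat-server': ['host1.example.com']}
if hostgroup_defs['hiveserver'] or hostgroup_defs['webhcat-server']:
    print("define servicegroup HIVE")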

+ 0 - 110
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml

@@ -1,110 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
-      <version>0.12.0.2.0</version>
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <displayName>WebHCat Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HIVE</service>
-        <service>ZOOKEEPER</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

+ 0 - 20
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/__init__.py

@@ -1,20 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""

+ 0 - 102
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py

@@ -1,102 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-import status_params
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-#RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
-
-#hadoop params
-hdp_stack_version = config['hostLevelParams']['stack_version']
-if rpm_version is not None:
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
-  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
-  if str(hdp_stack_version).startswith('2.0'):
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
-  # for newer versions
-  else:
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
-else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = '/usr'
-  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  if str(hdp_stack_version).startswith('2.0'):
-    config_dir = '/etc/hcatalog/conf'
-    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-  # for newer versions
-  else:
-    config_dir = '/etc/hive-webhcat/conf'
-    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.templeton_pid_dir
-
-pid_file = status_params.pid_file
-
-hadoop_conf_dir = config['configurations']['webhcat-site']['templeton.hadoop.conf.dir']
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-user_group = config['configurations']['cluster-env']['user_group']
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-webhcat_apps_dir = "/apps/webhcat"
-#for create_hdfs_directory
-hostname = config["hostname"]
-security_param = "true" if security_enabled else "false"
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
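
Note on the deleted module above: nearly all of it was re-homed into Hive's params.py earlier in this commit; the one pattern worth highlighting is the functools.partial wrapper, which freezes the keyword arguments shared by every HdfsDirectory call so call sites stay short. A self-contained sketch of that pattern (the resource function here is a hypothetical stand-in):

import functools

def hdfs_directory(path, conf_dir=None, hdfs_user=None, mode=0o755):
    print("ensure %s exists, owner=%s, conf=%s, mode=%o" % (path, hdfs_user, conf_dir, mode))

HdfsDirectory = functools.partial(hdfs_directory,
                                  conf_dir='/etc/hadoop/conf',
                                  hdfs_user='hdfs')

HdfsDirectory('/apps/webhcat')
HdfsDirectory('/user/hcat', mode=0o755)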

+ 0 - 45
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py

@@ -1,45 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-class WebHCatServiceCheck(Script):
-  def service_check(self, env):
-    import params
-
-    env.set_params(params)
-
-    File(format("{tmp_dir}/templetonSmoke.sh"),
-         content= StaticFile('templetonSmoke.sh'),
-         mode=0755
-    )
-
-    cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-                 " {security_param} {kinit_path_local}",
-                 smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-            logoutput=True)
-
-if __name__ == "__main__":
-  WebHCatServiceCheck().execute()

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json

@@ -26,10 +26,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

+ 58 - 42
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml

@@ -23,62 +23,78 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.13.0.2.1</version>
 
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>global</config-type>
-        <config-type>mapred-site</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.12.0.2.1</version>
       <components>
         <component>
-          <name>HCAT</name>
-          <category>CLIENT</category>
-          <commandScript>
-            <script>scripts/hcat_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>hive-site.xml</fileName>
-              <dictionaryName>hive-site</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-env.sh</fileName>
-              <dictionaryName>hive-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-log4j.properties</fileName>
-              <dictionaryName>hive-log4j</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-exec-log4j.properties</fileName>
-              <dictionaryName>hive-exec-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
+          <name>HIVE_SERVER</name>
+          <dependencies>
+            <dependency>
+              <name>TEZ/TEZ_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
         </component>
       </components>
+
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
+            <package>
+              <name>hive</name>
+            </package>
             <package>
               <name>hive-hcatalog</name>
             </package>
+            <package>
+              <name>hive-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-    </service>
 
+      <configuration-dependencies>
+        <config-type>hive-site</config-type>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>global</config-type>
+        <config-type>mapred-site</config-type>
+      </configuration-dependencies>
+    </service>
   </services>
 </metainfo>

+ 0 - 143
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml

@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>glusterfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>glusterfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>glusterfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The glusterfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.queue.name</name>
-    <value>default</value>
-    <description>MapReduce queue name where WebHCat map-only jobs will be submitted to. Can be used to avoid a deadlock where all map slots in the cluster are taken over by Templeton launcher tasks.</description>
-  </property>
-
-</configuration>

+ 0 - 46
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml

@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.13.0.2.1</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json

@@ -26,9 +26,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],

+ 47 - 15
ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml

@@ -37,31 +37,63 @@
           </dependencies>
         </component>
       </components>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>tez-site</config-type>
-        <config-type>hive-env</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.12.0.2.1</version>
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
+            <package>
+              <name>hive</name>
+            </package>
             <package>
               <name>hive-hcatalog</name>
             </package>
+            <package>
+              <name>hive-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-    </service>
 
+      <configuration-dependencies>
+        <config-type>hive-site</config-type>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>tez-site</config-type>
+        <config-type>hive-env</config-type>
+      </configuration-dependencies>
+    </service>
   </services>
 </metainfo>

+ 0 - 143
ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml

@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.queue.name</name>
-    <value>default</value>
-    <description>MapReduce queue name where WebHCat map-only jobs will be submitted to. Can be used to avoid a deadlock where all map slots in the cluster are taken over by Templeton launcher tasks.</description>
-  </property>
-
-</configuration>

+ 0 - 47
ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml

@@ -1,47 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.13.0.2.1</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

+ 0 - 4
ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml

@@ -23,9 +23,5 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.14.0.2.2</version>
     </service>
-    <service>
-      <name>HCATALOG</name>
-      <version>0.14.0.2.2</version>
-    </service>
   </services>
 </metainfo>

+ 0 - 26
ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml

@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <version>0.14.0.2.2</version>
-    </service>
-  </services>
-</metainfo>

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml → ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml


+ 12 - 17
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml

@@ -32,6 +32,18 @@
             <package>
               <name>mysql-connector-java</name>
             </package>
+            <package>
+              <name>hive_2_9_9_9_117-hcatalog</name>
+            </package>
+            <package>
+              <name>hive_2_9_9_9_117-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
           </packages>
         </osSpecific>
         <osSpecific>
@@ -60,22 +72,5 @@
         </osSpecific>
       </osSpecifics>
     </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.14.0.2.9.9.9</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive_2_9_9_9_117-hcatalog</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-    </service>
-
   </services>
 </metainfo>

+ 10 - 19
ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java

@@ -18,35 +18,26 @@
 
 package org.apache.ambari.server.api.util;
 
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.*;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.*;
-
 import org.apache.ambari.server.state.stack.ConfigurationXml;
 import org.junit.Test;
 import org.xml.sax.SAXException;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-
 import javax.xml.bind.JAXBException;
 import javax.xml.namespace.QName;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPathExpressionException;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
 
 public class StackExtensionHelperTest {
 
@@ -305,7 +296,7 @@ public class StackExtensionHelperTest {
     helper.populateServicesForStack(stackInfo);
     helper.fillInfo();
     List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
-    assertEquals(13, allServices.size());
+    assertEquals(12, allServices.size());
     for (ServiceInfo serviceInfo : allServices) {
       if (serviceInfo.getName().equals("NAGIOS")) {
         assertTrue(serviceInfo.isMonitoringService());

+ 26 - 97
ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java

@@ -18,69 +18,19 @@
 
 package org.apache.ambari.server.controller;
 
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Type;
-import java.net.ConnectException;
-import java.net.MalformedURLException;
-import java.net.UnknownHostException;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
 import junit.framework.Assert;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ClusterNotFoundException;
-import org.apache.ambari.server.DuplicateResourceException;
-import org.apache.ambari.server.HostNotFoundException;
-import org.apache.ambari.server.ObjectNotFoundException;
-import org.apache.ambari.server.ParentObjectNotFoundException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.ServiceNotFoundException;
-import org.apache.ambari.server.StackAccessException;
-import org.apache.ambari.server.actionmanager.ActionDBAccessor;
-import org.apache.ambari.server.actionmanager.ActionType;
-import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
-import org.apache.ambari.server.actionmanager.HostRoleCommand;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.actionmanager.Request;
-import org.apache.ambari.server.actionmanager.Stage;
-import org.apache.ambari.server.actionmanager.TargetHostType;
+import org.apache.ambari.server.*;
+import org.apache.ambari.server.actionmanager.*;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.internal.ComponentResourceProviderTest;
-import org.apache.ambari.server.controller.internal.HostResourceProviderTest;
-import org.apache.ambari.server.controller.internal.RequestOperationLevel;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
-import org.apache.ambari.server.controller.internal.ServiceResourceProviderTest;
+import org.apache.ambari.server.controller.internal.*;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
@@ -93,52 +43,31 @@ import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.serveraction.ServerActionManager;
 import org.apache.ambari.server.serveraction.ServerActionManagerImpl;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigFactory;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
-import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.HostComponentAdminState;
-import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.MaintenanceState;
-import org.apache.ambari.server.state.RepositoryInfo;
-import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceComponent;
-import org.apache.ambari.server.state.ServiceComponentFactory;
-import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.ServiceComponentHostFactory;
-import org.apache.ambari.server.state.ServiceFactory;
-import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.StackInfo;
-import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
+import org.apache.ambari.server.state.svccomphost.*;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.collections.CollectionUtils;
 import org.easymock.Capture;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.ExpectedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.net.ConnectException;
+import java.net.MalformedURLException;
+import java.net.UnknownHostException;
+import java.text.MessageFormat;
+import java.util.*;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
 
 public class AmbariManagementControllerTest {
 
@@ -7110,7 +7039,7 @@ public class AmbariManagementControllerTest {
   public void testGetStackServices() throws Exception {
     StackServiceRequest request = new StackServiceRequest(STACK_NAME, NEW_STACK_VERSION, null);
     Set<StackServiceResponse> responses = controller.getStackServices(Collections.singleton(request));
-    Assert.assertEquals(12, responses.size());
+    Assert.assertEquals(11, responses.size());
 
 
     StackServiceRequest requestWithParams = new StackServiceRequest(STACK_NAME, NEW_STACK_VERSION, SERVICE_NAME);

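The expected count drops from 12 to 11 here because the test stacks no longer define WEBHCAT as a standalone service (its metainfo under stacks/HDP/2.0.6/services/WEBHCAT is deleted later in this change), so getStackServices returns one fewer StackServiceResponse.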
+ 13 - 14
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java

@@ -4,9 +4,6 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.StackServiceResponse;
 import org.apache.ambari.server.state.DependencyInfo;
 import org.easymock.EasyMockSupport;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.isA;
-
 import org.junit.Before;
 import org.junit.Test;
 
@@ -15,7 +12,9 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 
-import static org.junit.Assert.*;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.isA;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -53,7 +52,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
@@ -92,7 +91,7 @@ public class BaseBlueprintProcessorTest {
                  5, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-                 "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+                 "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
                  "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
@@ -175,7 +174,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -212,7 +211,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
       "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
     assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
@@ -234,7 +233,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -271,7 +270,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
@@ -293,7 +292,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -330,7 +329,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
@@ -352,7 +351,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
@@ -389,7 +388,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",

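In every scenario above only the HCAT dependency changes: a SERVICE/COMPONENT key such as HIVE/HCAT must now resolve to the conditional service HIVE rather than the retired HCATALOG. A minimal Python sketch of that resolution (the names here are illustrative; the real logic lives in the Java BaseBlueprintProcessor):

```python
# Illustrative only: map a 'SERVICE/COMPONENT' dependency key to the
# service that dependency conditionally pulls into the blueprint.
# The pairs mirror the expectations asserted in the tests above.
CONDITIONAL_SERVICE = {
    "HIVE/HCAT": "HIVE",            # previously mapped to "HCATALOG"
    "YARN/YARN_CLIENT": "YARN",
    "TEZ/TEZ_CLIENT": "TEZ",
    "YARN/MAPREDUCE2_CLIENT": "YARN",
    "OOZIE/OOZIE_CLIENT": "OOZIE",
}

def conditional_service_for(dependency_name):
    """Return the service a dependency key resolves to, e.g. 'HIVE/HCAT' -> 'HIVE'."""
    return CONDITIONAL_SERVICE[dependency_name]

assert conditional_service_for("HIVE/HCAT") == "HIVE"
```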
+ 167 - 73
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java

@@ -18,93 +18,51 @@
 
 package org.apache.ambari.server.upgrade;
 
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNull;
-import static junit.framework.Assert.assertTrue;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.isA;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-import javax.persistence.EntityTransaction;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Order;
-import javax.persistence.criteria.Path;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
-import javax.persistence.metamodel.SingularAttribute;
-
+import com.google.inject.*;
+import com.google.inject.persist.PersistService;
+import com.google.inject.persist.Transactional;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.KeyValueDAO;
-import org.apache.ambari.server.orm.dao.PermissionDAO;
-import org.apache.ambari.server.orm.dao.PrincipalDAO;
-import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
-import org.apache.ambari.server.orm.dao.PrivilegeDAO;
-import org.apache.ambari.server.orm.dao.ResourceDAO;
-import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
-import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.dao.ViewDAO;
-import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
-import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
-import org.apache.ambari.server.orm.entities.KeyValueEntity;
-import org.apache.ambari.server.orm.entities.PrivilegeEntity;
-import org.apache.ambari.server.orm.entities.ResourceEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
-import org.apache.ambari.server.orm.entities.ViewEntity;
-import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
-import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
-import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.state.*;
 import org.easymock.Capture;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityTransaction;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.*;
+import javax.persistence.metamodel.SingularAttribute;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+
+import static junit.framework.Assert.*;
+import static org.easymock.EasyMock.*;
 
 /**
  * UpgradeCatalog170 unit tests.
  */
 public class UpgradeCatalog170Test {
 
+  private Injector injector;
+  private final String CLUSTER_NAME = "c1";
+  private final String SERVICE_NAME = "HDFS";
+  private final String HOST_NAME = "h1";
+  private final String DESIRED_STACK_VERSION = "{\"stackName\":\"HDP\",\"stackVersion\":\"2.0.6\"}";
+
   Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
   EntityManager entityManager = createStrictMock(EntityManager.class);
 
@@ -113,8 +71,144 @@ public class UpgradeCatalog170Test {
     reset(entityManagerProvider);
     expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
     replay(entityManagerProvider);
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+  }
+
+  @After
+  public void tearDown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  private ClusterEntity createCluster() {
+    ResourceTypeDAO resourceTypeDAO = injector.getInstance(ResourceTypeDAO.class);
+
+    // create an admin resource to represent this cluster
+    ResourceTypeEntity resourceTypeEntity = resourceTypeDAO.findById(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE);
+    if (resourceTypeEntity == null) {
+      resourceTypeEntity = new ResourceTypeEntity();
+      resourceTypeEntity.setId(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE);
+      resourceTypeEntity.setName(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE_NAME);
+      resourceTypeEntity = resourceTypeDAO.merge(resourceTypeEntity);
+    }
+    ResourceEntity resourceEntity = new ResourceEntity();
+    resourceEntity.setResourceType(resourceTypeEntity);
+
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterEntity clusterEntity = new ClusterEntity();
+    clusterEntity.setClusterId(1L);
+    clusterEntity.setClusterName(CLUSTER_NAME);
+    clusterEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    clusterEntity.setResource(resourceEntity);
+    clusterDAO.create(clusterEntity);
+    return clusterEntity;
+  }
+
+  private ClusterServiceEntity createService(ClusterEntity clusterEntity, String serviceName) {
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ClusterServiceEntity clusterServiceEntity = new ClusterServiceEntity();
+    clusterServiceEntity.setClusterId(1L);
+    clusterServiceEntity.setClusterEntity(clusterEntity);
+    clusterServiceEntity.setServiceName(serviceName);
+    clusterServiceDAO.create(clusterServiceEntity);
+    return clusterServiceEntity;
   }
 
+  private ClusterServiceEntity addService(ClusterEntity clusterEntity, String serviceName) {
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+
+    ClusterServiceEntity clusterServiceEntity = createService(clusterEntity, serviceName);
+
+    ServiceDesiredStateEntity serviceDesiredStateEntity = new ServiceDesiredStateEntity();
+    serviceDesiredStateEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    serviceDesiredStateEntity.setClusterId(1L);
+    serviceDesiredStateEntity.setServiceName(serviceName);
+    serviceDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+
+    clusterServiceEntity.setServiceDesiredStateEntity(serviceDesiredStateEntity);
+    clusterEntity.getClusterServiceEntities().add(clusterServiceEntity);
+
+    clusterDAO.merge(clusterEntity);
+
+    return clusterServiceEntity;
+  }
+
+
+  private HostEntity createHost(ClusterEntity clusterEntity) {
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    HostEntity hostEntity = new HostEntity();
+    hostEntity.setHostName(HOST_NAME);
+    hostEntity.setClusterEntities(Collections.singletonList(clusterEntity));
+    hostDAO.create(hostEntity);
+    clusterEntity.getHostEntities().add(hostEntity);
+    clusterDAO.merge(clusterEntity);
+    return hostEntity;
+  }
+
+  @Transactional
+  private void addComponent(ClusterEntity clusterEntity, ClusterServiceEntity clusterServiceEntity, HostEntity hostEntity, String componentName) {
+    ServiceComponentDesiredStateEntity componentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+    componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+    componentDesiredStateEntity.setComponentName(componentName);
+    componentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    componentDesiredStateEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    componentDesiredStateEntity.setClusterId(clusterServiceEntity.getClusterId());
+
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+    HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+    hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentDesiredStateEntity.setHostName(hostEntity.getHostName());
+    hostComponentDesiredStateEntity.setComponentName(componentName);
+    hostComponentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentDesiredStateEntity.setAdminState(HostComponentAdminState.INSERVICE);
+    hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+    hostComponentDesiredStateEntity.setHostEntity(hostEntity);
+    hostComponentDesiredStateDAO.create(hostComponentDesiredStateEntity);
+
+
+    HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+    hostComponentStateEntity.setHostEntity(hostEntity);
+    hostComponentStateEntity.setHostName(hostEntity.getHostName());
+    hostComponentStateEntity.setComponentName(componentName);
+    hostComponentStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentStateEntity.setCurrentStackVersion(clusterEntity.getDesiredStackVersion());
+    hostComponentStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+
+    componentDesiredStateEntity.setHostComponentStateEntities(Collections.singletonList(hostComponentStateEntity));
+    componentDesiredStateEntity.setHostComponentDesiredStateEntities(Collections.singletonList(hostComponentDesiredStateEntity));
+
+    hostEntity.getHostComponentStateEntities().add(hostComponentStateEntity);
+    hostEntity.getHostComponentDesiredStateEntities().add(hostComponentDesiredStateEntity);
+
+    clusterServiceEntity.getServiceComponentDesiredStateEntities().add(componentDesiredStateEntity);
+
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    serviceComponentDesiredStateDAO.merge(componentDesiredStateEntity);
+    hostDAO.merge(hostEntity);
+    clusterServiceDAO.merge(clusterServiceEntity);
+  }
+
+  @Test
+  public void testMoveHcatalogIntoHiveService() throws AmbariException {
+    final ClusterEntity clusterEntity = createCluster();
+    final ClusterServiceEntity clusterServiceEntityHDFS = addService(clusterEntity, "HDFS");
+    final ClusterServiceEntity clusterServiceEntityHIVE = addService(clusterEntity, "HIVE");
+    final ClusterServiceEntity clusterServiceEntityHCATALOG = addService(clusterEntity, "HCATALOG");
+    final ClusterServiceEntity clusterServiceEntityWEBHCAT = addService(clusterEntity, "WEBHCAT");
+    final HostEntity hostEntity = createHost(clusterEntity);
+    addComponent(clusterEntity, clusterServiceEntityHDFS, hostEntity, "NAMENODE");
+    addComponent(clusterEntity, clusterServiceEntityHIVE, hostEntity, "HIVE_SERVER");
+    addComponent(clusterEntity, clusterServiceEntityHCATALOG, hostEntity, "HCAT");
+    addComponent(clusterEntity, clusterServiceEntityWEBHCAT, hostEntity, "WEBHCAT_SERVER");
+    UpgradeCatalog170 upgradeCatalog170 = injector.getInstance(UpgradeCatalog170.class);
+    upgradeCatalog170.moveHcatalogIntoHiveService();
+  }
+
+
   @Test
   public void testExecuteDDLUpdates() throws Exception {
 

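testMoveHcatalogIntoHiveService seeds a cluster that still carries standalone HCATALOG and WEBHCAT services, each with one component, and then invokes the upgrade step. Conceptually that step re-parents the HCAT and WEBHCAT_SERVER components under HIVE and drops the emptied services; a plain-Python sketch of that shape, with dicts standing in for the JPA entities UpgradeCatalog170 actually manipulates:

```python
# Conceptual sketch of the data move the test exercises; plain dicts
# stand in for ClusterServiceEntity and the component entities.
def move_hcatalog_into_hive(services):
    """Re-parent components of HCATALOG and WEBHCAT under HIVE,
    then drop the now-empty services."""
    for old_service in ("HCATALOG", "WEBHCAT"):
        if old_service in services:
            services.setdefault("HIVE", []).extend(services.pop(old_service))
    return services

cluster = {
    "HDFS": ["NAMENODE"],
    "HIVE": ["HIVE_SERVER"],
    "HCATALOG": ["HCAT"],
    "WEBHCAT": ["WEBHCAT_SERVER"],
}
move_hcatalog_into_hive(cluster)
assert cluster["HIVE"] == ["HIVE_SERVER", "HCAT", "WEBHCAT_SERVER"]
assert "HCATALOG" not in cluster and "WEBHCAT" not in cluster
```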
+ 20 - 0
ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py

@@ -59,6 +59,16 @@ class TestServiceCheck(RMFTestCase):
                         user = 'ambari-qa',
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
 
   @patch("sys.exit")
@@ -96,4 +106,14 @@ class TestServiceCheck(RMFTestCase):
                         user = 'ambari-qa',
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()

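The two added blocks fold the WebHCat smoke test into the Hive service check. templetonSmoke.sh takes, in order: the WebHCat host, the smoke user, the smoke user's keytab (or the literal no_keytab), the security flag, and the kinit path. A small sketch of how a check might compose that command (parameter names are illustrative; the real logic sits in HIVE/package/scripts/webhcat_service_check.py):

```python
# Illustrative composition of the templetonSmoke.sh call asserted above.
def templeton_smoke_cmd(host, smokeuser, security_enabled,
                        keytab='/etc/security/keytabs/smokeuser.headless.keytab',
                        kinit_path='/usr/bin/kinit'):
    # On non-secure clusters the keytab slot carries the sentinel 'no_keytab'.
    smoke_keytab = keytab if security_enabled else 'no_keytab'
    return '/tmp/templetonSmoke.sh {0} {1} {2} {3} {4}'.format(
        host, smokeuser, smoke_keytab,
        str(security_enabled).lower(), kinit_path)

print(templeton_smoke_cmd('c6402.ambari.apache.org', 'ambari-qa', False))
# -> /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit
```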
+ 6 - 6
ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py → ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py

@@ -23,7 +23,7 @@ from stacks.utils.RMFTestCase import *
 class TestWebHCatServer(RMFTestCase):
 
   def test_configure_default(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "configure",
                        config_file="default.json"
@@ -32,7 +32,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def test_start_default(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "start",
                        config_file="default.json"
@@ -46,7 +46,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def test_stop_default(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "stop",
                        config_file="default.json"
@@ -59,7 +59,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
     def test_configure_secured(self):
-      self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
+      self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
                          classname = "WebHCatServer",
                          command = "configure",
                          config_file="secured.json"
@@ -69,7 +69,7 @@ class TestWebHCatServer(RMFTestCase):
       self.assertNoMoreResources()
 
   def test_start_secured(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "start",
                        config_file="secured.json"
@@ -83,7 +83,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "stop",
                        config_file="secured.json"

+ 0 - 61
ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_service_check.py

@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, call, patch
-from stacks.utils.RMFTestCase import *
-
-class TestServiceCheck(RMFTestCase):
-
-  def test_service_check_default(self):
-
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="default.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', 'sh /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()
-
-  def test_service_check_secured(self):
-
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="secured.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', 'sh /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()

+ 20 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -63,6 +63,16 @@ class TestServiceCheck(RMFTestCase):
                         environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
 
   @patch("sys.exit")
@@ -103,4 +113,14 @@
                         environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()

+ 53 - 56
ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py → ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py

@@ -20,13 +20,10 @@ limitations under the License.
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 
-from resource_management.libraries import functions
-import json
-
 class TestWebHCatServer(RMFTestCase):
 
   def test_configure_default(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "configure",
                        config_file="default.json"
@@ -35,7 +32,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def test_start_default(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "start",
                        config_file="default.json"
@@ -49,7 +46,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def test_stop_default(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "stop",
                        config_file="default.json"
@@ -62,7 +59,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
     def test_configure_secured(self):
-      self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
+      self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                          classname = "WebHCatServer",
                          command = "configure",
                          config_file="secured.json"
@@ -72,7 +69,7 @@ class TestWebHCatServer(RMFTestCase):
       self.assertNoMoreResources()
 
   def test_start_secured(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "start",
                        config_file="secured.json"
@@ -86,7 +83,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "stop",
                        config_file="secured.json"
@@ -131,33 +128,33 @@ class TestWebHCatServer(RMFTestCase):
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/var/log/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
-      owner = 'hcat',
-      group = 'hadoop',
-    )
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
-      owner = 'hcat',
-      group = 'hadoop',
-      conf_dir = '/etc/hcatalog/conf',
-      configurations = self.getConfig()['configurations']['webhcat-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hcatalog/conf',
+                              configurations = self.getConfig()['configurations']['webhcat-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
     )
     self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
-      owner = 'hcat',
-      group = 'hadoop',
-    )
+                              content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
                               owner='hcat',
                               mode=0755,
@@ -216,37 +213,37 @@ class TestWebHCatServer(RMFTestCase):
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/var/log/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
-      owner = 'hcat',
-      group = 'hadoop',
-    )
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
-      owner = 'hcat',
-      group = 'hadoop',
-      conf_dir = '/etc/hcatalog/conf',
-      configurations = self.getConfig()['configurations']['webhcat-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hcatalog/conf',
+                              configurations = self.getConfig()['configurations']['webhcat-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
     )
     self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
-      owner = 'hcat',
-      group = 'hadoop',
-    )
+                              content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-      path = ['/bin'],
-      user = 'hcat',
-    )
+                              path = ['/bin'],
+                              user = 'hcat',
+                              )
     self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
                               owner='hcat',
                               mode=0755,

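Apart from the script path moving under HIVE, these hunks only re-indent existing assertions; the resources they verify are unchanged and describe what webhcat.py lays down at configure time. A condensed sketch of those resources, meant to run inside Ambari's resource Environment as the real script does (paths and owners are the defaults from the test config, not universal values):

```python
from resource_management import Directory, File, InlineTemplate, XmlConfig

def configure_webhcat(config):
    # Runtime directories for the WebHCat server process.
    for dirname in ('/var/run/webhcat', '/var/log/webhcat'):
        Directory(dirname, owner='hcat', group='hadoop',
                  recursive=True, mode=0755)
    # Config directory plus the two generated configuration files.
    Directory('/etc/hcatalog/conf', owner='hcat', group='hadoop')
    XmlConfig('webhcat-site.xml',
              conf_dir='/etc/hcatalog/conf',
              configurations=config['configurations']['webhcat-site'],
              configuration_attributes=config['configuration_attributes']['webhcat-site'],
              owner='hcat', group='hadoop')
    File('/etc/hcatalog/conf/webhcat-env.sh',
         content=InlineTemplate(config['configurations']['webhcat-env']['content']),
         owner='hcat', group='hadoop')
```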
+ 0 - 61
ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_service_check.py

@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, call, patch
-from stacks.utils.RMFTestCase import *
-
-class TestServiceCheck(RMFTestCase):
-
-  def test_service_check_default(self):
-
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="default.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()
-
-  def test_service_check_secured(self):
-
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="secured.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()

+ 74 - 0
ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml

@@ -80,6 +80,49 @@
           </commandScript>
         </component>
 
+        <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
         <component>
           <name>MYSQL_SERVER</name>
           <category>MASTER</category>
@@ -111,6 +154,37 @@
             </configFile>
           </configFiles>
         </component>
+        <component>
+          <name>HCAT</name>
+          <displayName>HCat</displayName>
+          <category>CLIENT</category>
+          <commandScript>
+            <script>scripts/hcat_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>xml</type>
+              <fileName>hive-site.xml</fileName>
+              <dictionaryName>hive-site</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-env.sh</fileName>
+              <dictionaryName>hive-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-log4j.properties</fileName>
+              <dictionaryName>hive-log4j</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-exec-log4j.properties</fileName>
+              <dictionaryName>hive-exec-log4j</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
       </components>
 
       <osSpecifics>

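After these additions the 2.0.5 test stack declares WEBHCAT_SERVER (with auto-deployed HDFS, MAPREDUCE and ZooKeeper dependencies) and the HCAT client as HIVE components. A quick, illustrative sanity check over the merged metainfo, runnable from the repository root:

```python
# Hypothetical sanity check of the merged metainfo; run from the repo root.
import xml.etree.ElementTree as ET

path = ('ambari-server/src/test/resources/stacks/HDP/2.0.5/'
        'services/HIVE/metainfo.xml')
components = [c.findtext('name')
              for c in ET.parse(path).getroot().iter('component')]
assert 'WEBHCAT_SERVER' in components and 'HCAT' in components
```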
+ 0 - 126
ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/configuration/webhcat-site.xml

@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value></value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value></value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-</configuration>

+ 0 - 102
ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/metainfo.xml

@@ -1,102 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.11.0.2.0.5.0</version>
-
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

+ 0 - 28
ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml

@@ -1,28 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.11.0.2.0.5.0</version>
-    </service>
-  </services>
-</metainfo>

The file diffs are limited because too many files changed.
+ 0 - 0
ambari-web/app/assets/data/alerts/alerts.json


+ 0 - 117
ambari-web/app/assets/data/dashboard/services.json

@@ -1055,42 +1055,6 @@
         }
       ]
     },
-    {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "WEBHCAT"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT/components/WEBHCAT_SERVER",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "STARTED",
-            "component_name" : "WEBHCAT_SERVER",
-            "service_name" : "WEBHCAT"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/WEBHCAT_SERVER",
-              "HostRoles" : {
-                "configs" : {
-                  "global" : "version1",
-                  "webhcat-site" : "version1"
-                },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "STARTED",
-                "state" : "STARTED",
-                "component_name" : "WEBHCAT_SERVER",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
     {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HUE",
       "ServiceInfo" : {
@@ -1505,87 +1469,6 @@
         }
       ]
     },
-    {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "HCATALOG"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG/components/HCAT",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "INSTALLED",
-            "component_name" : "HCAT",
-            "service_name" : "HCATALOG"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-16-48-4B.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-16-48-4B.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-79-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-79-42.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-38-164.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-38-164.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-191-202-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-191-202-42.ec2.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
     {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/ZOOKEEPER",
       "ServiceInfo" : {

+ 1 - 10
ambari-web/app/assets/data/hosts/HDP2/hosts.json

@@ -113,15 +113,6 @@
             "state" : "INSTALL_FAILED"
           }
         },
-        {
-          "HostRoles" : {
-            "component_name" : "HCAT",
-            "maintenance_state" : "OFF",
-            "service_name" : "HCATALOG",
-            "stale_configs" : false,
-            "state" : "INSTALL_FAILED"
-          }
-        },
         {
           "HostRoles" : {
             "component_name" : "HDFS_CLIENT",
@@ -315,7 +306,7 @@
           "HostRoles" : {
             "component_name" : "WEBHCAT_SERVER",
             "maintenance_state" : "OFF",
-            "service_name" : "WEBHCAT",
+            "service_name" : "HIVE",
             "stale_configs" : false,
             "state" : "INSTALL_FAILED"
           }

+ 0 - 9
ambari-web/app/assets/data/services/HDP2/components_state.json

@@ -36,15 +36,6 @@
         "total_count" : 1
       }
     },
-    {
-      "ServiceComponentInfo" : {
-        "component_name" : "HCAT",
-        "installed_count" : 1,
-        "service_name" : "HCATALOG",
-        "started_count" : 0,
-        "total_count" : 1
-      }
-    },
     {
       "ServiceComponentInfo" : {
         "component_name" : "DATANODE",

+ 0 - 16
ambari-web/app/assets/data/services/HDP2/services.json

@@ -49,22 +49,6 @@
         }
       }
     },
-    {
-      "href" : "http://192.168.56.101:8080/api/v1/clusters/cl/services/HCATALOG",
-      "ServiceInfo" : {
-        "maintenance_state" : "OFF",
-        "cluster_name" : "cl",
-        "service_name" : "HCATALOG",
-        "state": "STARTED"
-      },
-      "alerts" : {
-        "summary" : {
-          "CRITICAL" : 0,
-          "OK" : 0,
-          "WARNING" : 0
-        }
-      }
-    },
     {
       "href" : "http://192.168.56.101:8080/api/v1/clusters/cl/services/HDFS",
       "ServiceInfo" : {

+ 1 - 117
ambari-web/app/assets/data/services/host_component_actual_configs.json

@@ -1078,42 +1078,7 @@
         }
       ]
     },
-    {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "WEBHCAT"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT/components/WEBHCAT_SERVER",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "STARTED",
-            "component_name" : "WEBHCAT_SERVER",
-            "service_name" : "WEBHCAT"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/WEBHCAT_SERVER",
-              "HostRoles" : {
-                "configs" : {
-                  "global" : "version1",
-                  "webhcat-site" : "version1"
-                },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "STARTED",
-                "state" : "STARTED",
-                "component_name" : "WEBHCAT_SERVER",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
+
     {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HUE",
       "ServiceInfo" : {
@@ -1456,87 +1421,6 @@
         }
       ]
     },
-    {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "HCATALOG"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG/components/HCAT",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "INSTALLED",
-            "component_name" : "HCAT",
-            "service_name" : "HCATALOG"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-16-48-4B.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-16-48-4B.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-79-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-79-42.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-38-164.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-38-164.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-191-202-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-191-202-42.ec2.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
     {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/ZOOKEEPER",
       "ServiceInfo" : {

+ 1 - 1
ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json

@@ -6,7 +6,7 @@
     "stack_version": "2.1.1"
   },
   "hosts": ["dev1.hortonworks.com", "dev2.hortonworks.com", "dev3.hortonworks.com"],
-  "services": ["FALCON", "FLUME", "GANGLIA", "HBASE", "HCATALOG", "HDFS", "HIVE", "MAPREDUCE2", "NAGIOS", "OOZIE", "PIG", "SQOOP", "STORM", "TEZ", "WEBCHAT", "YARN", "ZOOKEEPER"],
+  "services": ["FALCON", "FLUME", "GANGLIA", "HBASE", "HDFS", "HIVE", "MAPREDUCE2", "NAGIOS", "OOZIE", "PIG", "SQOOP", "STORM", "TEZ", "WEBCHAT", "YARN", "ZOOKEEPER"],
   "recommendations": {
     "blueprint": {
       "configurations": {

+ 1 - 1
ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json

@@ -1,6 +1,6 @@
 {
   "hosts": ["ab2test-5.c.pramod-thangali.internal", "ab2test-6.c.pramod-thangali.internal", "ab2test-7.c.pramod-thangali.internal"],
-  "services": ["HDFS", "MAPREDUCE2", "YARN", "TEZ", "NAGIOS", "GANGLIA", "HIVE", "HCATALOG", "WEBHCAT", "SQOOP", "OOZIE", "ZOOKEEPER", "FALCON", "STORM", "FLUME", "PIG"],
+  "services": ["HDFS", "MAPREDUCE2", "YARN", "TEZ", "NAGIOS", "GANGLIA", "HIVE", "SQOOP", "OOZIE", "ZOOKEEPER", "FALCON", "STORM", "FLUME", "PIG"],
   "recommendations": {
   "blueprint": {
     "host_groups": [

+ 83 - 135
ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json

@@ -319,42 +319,6 @@
         }
       ]
     },
-    {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HCATALOG",
-      "StackServices" : {
-        "comments" : "This is comment for HCATALOG service",
-        "custom_commands" : [ ],
-        "display_name" : "HCatalog",
-        "required_services" : [
-          "HIVE"
-        ],
-        "service_check_supported" : true,
-        "service_name" : "HCATALOG",
-        "service_version" : "0.12.0.2.1",
-        "stack_name" : "HDP",
-        "stack_version" : "2.1",
-        "user_name" : null,
-        "config_types" : { }
-      },
-      "serviceComponents" : [
-        {
-          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HCATALOG/components/HCAT",
-          "StackServiceComponents" : {
-            "cardinality" : null,
-            "component_category" : "CLIENT",
-            "component_name" : "HCAT",
-            "custom_commands" : [ ],
-            "display_name" : "HCat",
-            "is_client" : true,
-            "is_master" : false,
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "stack_version" : "2.1"
-          },
-          "dependencies" : [ ]
-        }
-      ]
-    },
     {
       "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HDFS",
       "StackServices" : {
@@ -557,6 +521,22 @@
           },
           "dependencies" : [ ]
         },
+        {
+          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HCATALOG/components/HCAT",
+          "StackServiceComponents" : {
+            "cardinality" : null,
+            "component_category" : "CLIENT",
+            "component_name" : "HCAT",
+            "custom_commands" : [ ],
+            "display_name" : "HCat",
+            "is_client" : true,
+            "is_master" : false,
+            "service_name" : "HCATALOG",
+            "stack_name" : "HDP",
+            "stack_version" : "2.1"
+          },
+          "dependencies" : [ ]
+        },
         {
           "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HIVE/components/HIVE_METASTORE",
           "StackServiceComponents" : {
@@ -634,6 +614,73 @@
             }
           ]
         },
+        {
+          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER",
+          "StackServiceComponents" : {
+            "cardinality" : "1",
+            "component_category" : "MASTER",
+            "component_name" : "WEBHCAT_SERVER",
+            "custom_commands" : [ ],
+            "display_name" : "WebHCat Server",
+            "is_client" : false,
+            "is_master" : true,
+            "service_name" : "WEBHCAT",
+            "stack_name" : "HDP",
+            "stack_version" : "2.1"
+          },
+          "dependencies" : [
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/HDFS_CLIENT",
+              "Dependencies" : {
+                "component_name" : "HDFS_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/MAPREDUCE2_CLIENT",
+              "Dependencies" : {
+                "component_name" : "MAPREDUCE2_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/YARN_CLIENT",
+              "Dependencies" : {
+                "component_name" : "YARN_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_CLIENT",
+              "Dependencies" : {
+                "component_name" : "ZOOKEEPER_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_SERVER",
+              "Dependencies" : {
+                "component_name" : "ZOOKEEPER_SERVER",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            }
+          ]
+        },
         {
           "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HIVE/components/MYSQL_SERVER",
           "StackServiceComponents" : {
@@ -1235,105 +1282,6 @@
         }
       ]
     },
-    {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT",
-      "StackServices" : {
-        "comments" : "This is comment for WEBHCAT service",
-        "custom_commands" : [ ],
-        "display_name" : "WebHCat",
-        "required_services" : [
-          "HIVE",
-          "ZOOKEEPER"
-        ],
-        "service_check_supported" : true,
-        "service_name" : "WEBHCAT",
-        "service_version" : "0.13.0.2.1",
-        "stack_name" : "HDP",
-        "stack_version" : "2.1",
-        "user_name" : null,
-        "config_types" : {
-          "webhcat-env" : {
-            "supports" : {
-              "final" : "false"
-            }
-          },
-          "webhcat-site" : {
-            "supports" : {
-              "final" : "true"
-            }
-          }
-        }
-      },
-      "serviceComponents" : [
-        {
-          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER",
-          "StackServiceComponents" : {
-            "cardinality" : "1",
-            "component_category" : "MASTER",
-            "component_name" : "WEBHCAT_SERVER",
-            "custom_commands" : [ ],
-            "display_name" : "WebHCat Server",
-            "is_client" : false,
-            "is_master" : true,
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "stack_version" : "2.1"
-          },
-          "dependencies" : [
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/HDFS_CLIENT",
-              "Dependencies" : {
-                "component_name" : "HDFS_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/MAPREDUCE2_CLIENT",
-              "Dependencies" : {
-                "component_name" : "MAPREDUCE2_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/YARN_CLIENT",
-              "Dependencies" : {
-                "component_name" : "YARN_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_CLIENT",
-              "Dependencies" : {
-                "component_name" : "ZOOKEEPER_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_SERVER",
-              "Dependencies" : {
-                "component_name" : "ZOOKEEPER_SERVER",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            }
-          ]
-        }
-      ]
-    },
     {
       "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/YARN",
       "StackServices" : {

+ 0 - 18
ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json

@@ -246,24 +246,6 @@
       "client" : false,
       "master" : true
     }
-  }, {
-    "name" : "HCATALOG",
-    "version" : "0.4.0.1-1",
-    "user" : "root",
-    "comment" : "This is comment for HCATALOG service",
-    "components" : [ {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    } ],
-    "clientOnlyService" : true,
-    "clientComponent" : {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    }
   }, {
     "name" : "HIVE",
     "version" : "0.9.0.1-1",

+ 0 - 11
ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json

@@ -111,17 +111,6 @@
         "service_version" : "3.2.0"
       }
     },
-    {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "1.2.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0"
-      }
-    },
     {
       "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HIVE",
       "StackServices" : {

+ 0 - 11
ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json

@@ -111,17 +111,6 @@
         "service_version" : "3.4.5"
       }
     },
-    {
-      "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "1.2.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0"
-      }
-    },
     {
       "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/GANGLIA",
       "StackServices" : {

+ 0 - 11
ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json

@@ -122,17 +122,6 @@
         "service_version" : "3.4.5"
       }
     },
-    {
-      "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "1.2.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0"
-      }
-    },
     {
       "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/GANGLIA",
       "StackServices" : {

+ 0 - 11
ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json

@@ -34,17 +34,6 @@
         "service_version" : "0.10.1.22-1"
       }
     },
-    {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "2.0.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0.22-1"
-      }
-    },
     {
       "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/WEBHCAT",
       "StackServices" : {

+ 0 - 11
ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json

@@ -34,17 +34,6 @@
         "service_version" : "0.10.1.22-1"
       }
     },
-    {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "2.0.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0.22-1"
-      }
-    },
     {
       "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/WEBHCAT",
       "StackServices" : {

+ 0 - 17
ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json

@@ -214,23 +214,6 @@
       "client" : false,
       "master" : true
     }
-  }, {
-    "name" : "HCATALOG",
-    "version" : "1.0",
-    "user" : "root",
-    "comment" : "This is comment for HCATALOG service",
-    "components" : [ {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    } ],
-    "clientComponent" : {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    }
   }, {
     "name" : "HIVE",
     "version" : "1.0",

+ 0 - 20
ambari-web/app/assets/data/wizard/stack/hdp/version01/HCATALOG.json

@@ -1,20 +0,0 @@
-{
-  "name" : "HCATALOG",
-  "version" : "0.4.0.1-1",
-  "user" : "root",
-  "comment" : "This is comment for HCATALOG service",
-  "properties" : [ ],
-  "components" : [ {
-    "name" : "HCAT",
-    "category" : "CLIENT",
-    "client" : true,
-    "master" : false
-  } ],
-  "clientOnlyService" : true,
-  "clientComponent" : {
-    "name" : "HCAT",
-    "category" : "CLIENT",
-    "client" : true,
-    "master" : false
-  }
-}

+ 0 - 4
ambari-web/app/assets/data/wizard/stack/hdp/version1.2.1/HCATALOG.json

@@ -1,4 +0,0 @@
-{
-  "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}

+ 0 - 4
ambari-web/app/assets/data/wizard/stack/hdp/version1.3.0/HCATALOG.json

@@ -1,4 +0,0 @@
-{
-  "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}

+ 0 - 4
ambari-web/app/assets/data/wizard/stack/hdp/version131/HCATALOG.json

@@ -1,4 +0,0 @@
-{
-  "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}

+ 0 - 4
ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/HCATALOG.json

@@ -1,4 +0,0 @@
-{
-  "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}

+ 0 - 55
ambari-web/app/assets/data/wizard/stack/stacks.json

@@ -20,17 +20,6 @@
             "comments" : "System for workflow coordination and execution of Apache Hadoop jobs"
           }
         },
-        {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.3.0",
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for HCATALOG service"
-          }
-        },
         {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/MAPREDUCE",
           "StackServices" : {
@@ -97,17 +86,6 @@
             "comments" : "Ganglia Metrics Collection system"
           }
         },
-        {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/WEBHCAT",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.3.0",
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for WEBHCAT service"
-          }
-        },
         {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/SQOOP",
           "StackServices" : {
@@ -195,17 +173,6 @@
             "comments" : "This is comment for ZOOKEEPER service"
           }
         },
-        {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.2.1",
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for HCATALOG service"
-          }
-        },
         {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HDFS",
           "StackServices" : {
@@ -272,17 +239,6 @@
             "comments" : "Apache Hadoop Distributed Processing Framework"
           }
         },
-        {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/WEBHCAT",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.2.1",
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for WEBHCAT service"
-          }
-        },
         {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/SQOOP",
           "StackServices" : {
@@ -359,17 +315,6 @@
             "comments" : "Nagios Monitoring and Alerting system"
           }
         },
-        {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.0/stackServices/HCATALOG",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.2.0",
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for HCATALOG service"
-          }
-        },
         {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.0/stackServices/WEBHCAT",
           "StackServices" : {

+ 2 - 2
ambari-web/app/controllers/main/admin/security/add/step2.js

@@ -76,7 +76,7 @@ App.MainAdminSecurityAddStep2Controller = Em.Controller.extend({
       components: ['HIVE_SERVER']
     },
     {
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       configName: 'webhcatserver_host',
       components: ['WEBHCAT_SERVER']
     },
@@ -168,7 +168,7 @@ App.MainAdminSecurityAddStep2Controller = Em.Controller.extend({
       primaryName: 'HTTP/'
     },
     {
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       configName: 'webhcatserver_host',
       principalName: 'webHCat_http_principal_name',
       primaryName: 'HTTP/'

+ 1 - 1
ambari-web/app/controllers/main/admin/serviceAccounts_controller.js

@@ -122,7 +122,7 @@ App.MainAdminServiceAccountsController = App.MainServiceInfoConfigsController.ex
     var proxyUserGroup = misc_configs.findProperty('name', 'proxyuser_group');
     //stack, with version lower than 2.1, doesn't have Falcon service
     if (proxyUserGroup) {
-      var proxyServices = ['HIVE', 'WEBHCAT', 'OOZIE', 'FALCON'];
+      var proxyServices = ['HIVE', 'OOZIE', 'FALCON'];
       var services = Em.A([]);
       proxyServices.forEach(function (serviceName) {
         var stackService = App.StackService.find(serviceName);

+ 4 - 4
ambari-web/app/controllers/main/service/info/configs.js

@@ -1698,7 +1698,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   addDynamicProperties: function (configs) {
     var allConfigs = this.get('stepConfigs').findProperty('serviceName', this.get('content.serviceName')).get('configs');
     var templetonHiveProperty = allConfigs.someProperty('name', 'templeton.hive.properties');
-    if (!templetonHiveProperty && this.get('content.serviceName') === 'WEBHCAT') {
+    if (!templetonHiveProperty && this.get('content.serviceName') === 'HIVE') {
       configs.pushObject({
         "name": "templeton.hive.properties",
         "templateName": ["hivemetastore_host"],
@@ -2136,7 +2136,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
       hostProperty: 'hivemetastore_host',
       componentName: 'HIVE_SERVER',
       serviceName: 'HIVE',
-      serviceUseThis: ['WEBHCAT']
+      serviceUseThis: ['HIVE']
     },
     {
       hostProperty: 'oozieserver_host',
@@ -2160,7 +2160,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
     {
       hostProperty: 'webhcatserver_host',
       componentName: 'WEBHCAT_SERVER',
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       serviceUseThis: [],
       m: true
     },
@@ -2168,7 +2168,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
       hostProperty: 'zookeeperserver_hosts',
       componentName: 'ZOOKEEPER_SERVER',
       serviceName: 'ZOOKEEPER',
-      serviceUseThis: ['HBASE', 'WEBHCAT'],
+      serviceUseThis: ['HBASE', 'HIVE'],
       m: true
     },
     {

+ 1 - 1
ambari-web/app/controllers/main/service/item.js

@@ -96,7 +96,7 @@ App.MainServiceItemController = Em.Controller.extend({
   }.property('content.serviceName'),
 
   isConfigurable: function () {
-    return !App.get('services.noConfigTypes').concat('HCATALOG').contains(this.get('content.serviceName'));
+    return !App.get('services.noConfigTypes').contains(this.get('content.serviceName'));
   }.property('App.services.noConfigTypes','content.serviceName'),
 
   allHosts: [],

Some files were not shown because too many files changed