Browse Source

AMBARI-14739. Capture package requirements that are optional and config dependent (aonishuk)

Andrew Onishuk 9 years ago
parent
commit
9f76f6019d
47 changed files with 184 additions and 80 deletions
  1. +2 -2    ambari-agent/src/test/python/resource_management/TestScript.py
  2. +1 -4    ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py
  3. +64 -0   ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
  4. +19 -2   ambari-common/src/main/python/resource_management/libraries/script/script.py
  5. +1 -0    ambari-funtest/src/test/resources/stacks/HDP/2.1.1/services/AMBARI_METRICS/metainfo.xml
  6. +15 -2   ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
  7. +3 -1    ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py
  8. +2 -2    ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
  9. +2 -1    ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
 10. +2 -1    ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
 11. +0 -5    ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
 12. +2 -1    ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
 13. +4 -0    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
 14. +2 -2    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
 15. +2 -2    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
 16. +2 -2    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
 17. +2 -2    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
 18. +1 -1    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
 19. +0 -4    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
 20. +1 -1    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
 21. +1 -1    ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
 22. +4 -0    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
 23. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
 24. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
 25. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
 26. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
 27. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_server.py
 28. +1 -17   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
 29. +0 -2    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
 30. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
 31. +6 -0    ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml
 32. +1 -1    ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_client.py
 33. +1 -0    ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/metainfo.xml
 34. +4 -13   ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
 35. +4 -0    ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
 36. +2 -0    ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
 37. +4 -0    ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
 38. +5 -0    ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
 39. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
 40. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
 41. +3 -0    ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
 42. +7 -0    ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml
 43. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml
 44. +2 -2    ambari-server/src/test/python/custom_actions/configs/install_packages_config.json
 45. +1 -1    ambari-server/src/test/python/stacks/2.0.6/configs/hbase_no_phx.json
 46. +1 -1    ambari-server/src/test/python/stacks/2.0.6/configs/hbase_with_phx.json
 47. +1 -1    ambari-server/src/test/python/stacks/2.3/configs/pxf_default.json

+ 2 - 2
ambari-agent/src/test/python/resource_management/TestScript.py

@@ -59,8 +59,8 @@ class TestScript(TestCase):
     }
     dummy_config = {
       'hostLevelParams' : {
-        'package_list' : "[{\"type\":\"rpm\",\"name\":\"hbase\"},"
-                         "{\"type\":\"rpm\",\"name\":\"yet-another-package\"}]",
+        'package_list' : "[{\"type\":\"rpm\",\"name\":\"hbase\", \"condition\": \"\"},"
+                         "{\"type\":\"rpm\",\"name\":\"yet-another-package\", \"condition\": \"\"}]",
         'repo_info' : "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
         'service_repo_info' : "[{\"mirrorsList\":\"abc\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
       }

+ 1 - 4
ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py

@@ -38,10 +38,7 @@ def get_lzo_packages(stack_version_unformatted):
   hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
   if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-    if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
-      lzo_packages += [format("hadooplzo_*")]
-    elif OSCheck.is_ubuntu_family():
-      lzo_packages += [format("hadooplzo_*")]
+    lzo_packages += ["hadooplzo_*"]
   else:
     lzo_packages += ["hadoop-lzo"]
 

+ 64 - 0
ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py

@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+__all__ = ["is_lzo_enabled", "should_install_phoenix", "should_install_ams_collector", "should_install_mysql", "should_install_mysl_connector"]
+
+import os
+from resource_management.libraries.script import Script
+from resource_management.libraries.functions.default import default
+
+def should_install_lzo():
+  config = Script.get_config()
+  io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+  lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+  return lzo_enabled
+
+def should_install_phoenix():
+  phoenix_hosts = default('/clusterHostInfo/phoenix_query_server_hosts', [])
+  phoenix_enabled = default('/configurations/hbase-env/phoenix_sql_enabled', False)
+  has_phoenix = len(phoenix_hosts) > 0
+  return phoenix_enabled or has_phoenix
+
+def should_install_ams_collector():
+  config = Script.get_config()
+  return 'role' in config and config['role'] == "METRICS_COLLECTOR"
+
+def should_install_mysql():
+  config = Script.get_config()
+  hive_database = config['configurations']['hive-env']['hive_database']
+  hive_use_existing_db = hive_database.startswith('Existing')
+  
+  if hive_use_existing_db or 'role' in config and config['role'] != "MYSQL_SERVER":
+    return False
+  return True
+
+def should_install_mysl_connector():
+  mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
+  return not os.path.exists(mysql_jdbc_driver_jar)
+
+def should_install_hive_atlas():
+  atlas_hosts = default('/clusterHostInfo/atlas_server_hosts', [])
+  has_atlas = len(atlas_hosts) > 0
+  return has_atlas
+
+def should_install_kerberos_server():
+  config = Script.get_config()
+  return 'role' in config and config['role'] != "KERBEROS_CLIENT"

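Each predicate above reads the current command configuration through Script.get_config() / default() and returns a boolean; a service's metainfo.xml references it by bare function name in a <condition> element. A minimal sketch of adding a new one follows (the FOO_SERVER role and the should_install_foo_server name are hypothetical, for illustration only):

# Hypothetical predicate: a real one would be appended to package_conditions.py
# above and referenced from metainfo.xml as
#   <condition>should_install_foo_server</condition>
from resource_management.libraries.script import Script

def should_install_foo_server():
  # Install the server package only on hosts assigned the FOO_SERVER role.
  config = Script.get_config()
  return 'role' in config and config['role'] == "FOO_SERVER"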
+ 19 - 2
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -28,6 +28,7 @@ import logging
 import platform
 import inspect
 import tarfile
+import resource_management
 from ambari_commons import OSCheck, OSConst
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING, UPGRADE_TYPE_ROLLING
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -420,7 +421,7 @@ class Script(object):
     """
     self.install_packages(env)
 
-  def install_packages(self, env, exclude_packages=[]):
+  def install_packages(self, env):
     """
     List of packages that are required< by service is received from the server
     as a command parameter. The method installs all packages
@@ -442,7 +443,7 @@ class Script(object):
       if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
         package_list = json.loads(package_list_str)
         for package in package_list:
-          if not Script.matches_any_regexp(package['name'], exclude_packages):
+          if Script.check_package_condition(package):
             name = self.format_package_name(package['name'])
             # HACK: On Windows, only install ambari-metrics packages using Choco Package Installer
             # TODO: Update this once choco packages for hadoop are created. This is because, service metainfo.xml support
@@ -464,6 +465,22 @@ class Script(object):
                           str(config['hostLevelParams']['stack_version']))
       reload_windows_env()
       
+  @staticmethod
+  def check_package_condition(package):
+    from resource_management.libraries.functions import package_conditions
+    condition = package['condition']
+    name = package['name']
+    
+    if not condition:
+      return True
+    
+    try:
+      chooser_method = getattr(package_conditions, condition)
+    except AttributeError:
+      raise Fail("Condition with name '{0}', when installing package {1}. Please check package_conditions.py.".format(condition, name))
+
+    return chooser_method()
+      
   @staticmethod
   def matches_any_regexp(string, regexp_list):
     for regex in regexp_list:

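The dispatch above is small enough to reproduce standalone; a hedged sketch (a stand-in predicate class instead of the real package_conditions module) showing the three outcomes: an empty condition always installs, a known predicate decides, and an unknown name fails:

class _FakeConditions(object):
  # Stand-in for resource_management.libraries.functions.package_conditions.
  @staticmethod
  def should_install_lzo():
    return False  # pretend com.hadoop.compression.lzo is absent from core-site

class Fail(Exception):
  pass

def check_package_condition(package):
  condition = package['condition']
  if not condition:
    return True  # empty condition: package is always installed
  try:
    chooser_method = getattr(_FakeConditions, condition)
  except AttributeError:
    raise Fail("Unknown condition '%s' for package %s" % (condition, package['name']))
  return chooser_method()

packages = [{"name": "hadoop", "condition": ""},
            {"name": "hadooplzo_*", "condition": "should_install_lzo"}]
print([p["name"] for p in packages if check_package_condition(p)])  # ['hadoop']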
+ 1 - 0
ambari-funtest/src/test/resources/stacks/HDP/2.1.1/services/AMBARI_METRICS/metainfo.xml

@@ -69,6 +69,7 @@
           <packages>
             <package>
               <name>ambari-metrics-collector</name>
+              <condition>package_chooser.should_install_ams_collector()</condition>
             </package>
             <package>
               <name>ambari-metrics-monitor</name>

+ 15 - 2
ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java

@@ -165,6 +165,7 @@ public class ServiceOsSpecific {
   @XmlAccessorType(XmlAccessType.FIELD)
   public static class Package {
     private String name;
+    private String condition = "";
 
     /**
      * If true, package will not be attempted to be upgraded during RU.
@@ -180,6 +181,14 @@ public class ServiceOsSpecific {
     public void setName(String name) {
       this.name = name;
     }
+    
+    public String getCondition() {
+      return condition;
+    }
+
+    public void setCondition(String condition) {
+      this.condition = condition;
+    }
 
     public Boolean getSkipUpgrade() {
       return skipUpgrade;
@@ -200,14 +209,18 @@ public class ServiceOsSpecific {
       Package aPackage = (Package) o;
 
       if (!name.equals(aPackage.name)) return false;
-      return skipUpgrade.equals(aPackage.skipUpgrade);
-
+      if (!skipUpgrade.equals(aPackage.skipUpgrade)) return false;
+      if (!condition.equals(aPackage.condition)) return false;
+      
+      return true;
     }
 
     @Override
     public int hashCode() {
       int result = name.hashCode();
       result = 31 * result + skipUpgrade.hashCode();
+      result = 31 * result + condition.hashCode();
+      
       return result;
     }
   }

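The new condition field travels with each package entry in the hostLevelParams/package_list JSON that the agent consumes; going by the test configs later in this diff, an entry serializes roughly as sketched below (an empty string means unconditional install):

import json

# One package entry as the agent receives it; "" disables the condition check.
entry = {"name": "mysql-server", "skipUpgrade": True, "condition": "should_install_mysql"}
print(json.dumps([entry]))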
+ 3 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py

@@ -25,7 +25,9 @@ from status import check_service_status
 
 class AmsMonitor(Script):
   def install(self, env):
-    self.install_packages(env, exclude_packages = ['ambari-metrics-collector'])
+    import params
+    env.set_params(params)
+    self.install_packages(env)
     self.configure(env) # for security
 
   def configure(self, env):

+ 2 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py

@@ -30,8 +30,8 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 class HbaseClient(Script):
   def install(self, env):
     import params
-    
-    self.install_packages(env, params.exclude_packages)
+    env.set_params(params)
+    self.install_packages(env)
     self.configure(env)
 
   def configure(self, env):

+ 2 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py

@@ -40,7 +40,8 @@ class HbaseMaster(Script):
 
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
+    env.set_params(params)
+    self.install_packages(env)
 
   def decommission(self, env):
     import params

+ 2 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py

@@ -34,7 +34,8 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 class HbaseRegionServer(Script):
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
+    env.set_params(params)
+    self.install_packages(env)
 
   def configure(self, env):
     import params

+ 0 - 5
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py

@@ -113,11 +113,6 @@ phoenix_hosts = default('/clusterHostInfo/phoenix_query_server_hosts', [])
 phoenix_enabled = default('/configurations/hbase-env/phoenix_sql_enabled', False)
 has_phoenix = len(phoenix_hosts) > 0
 
-if not has_phoenix and not phoenix_enabled:
-  exclude_packages = ['phoenix*']
-else:
-  exclude_packages = []
-
 underscored_version = stack_version_unformatted.replace('.', '_')
 dashed_version = stack_version_unformatted.replace('.', '-')
 if OSCheck.is_redhat_family() or OSCheck.is_suse_family():

+ 2 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py

@@ -28,7 +28,8 @@ class PhoenixQueryServer(Script):
 
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
+    env.set_params(params)
+    self.install_packages(env)
 
 
   def get_stack_to_component(self):

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml

@@ -165,6 +165,7 @@
             <package>
               <name>hadoop-lzo</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
           </packages>
         </osSpecific>
@@ -181,10 +182,12 @@
             <package>
               <name>lzo</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadoop-lzo-native</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadoop-libhdfs</name>
@@ -204,6 +207,7 @@
             <package>
               <name>liblzo2-2</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadoop-hdfs</name>

+ 2 - 2
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py

@@ -46,8 +46,8 @@ class DataNode(Script):
 
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
     env.set_params(params)
+    self.install_packages(env)
 
   def configure(self, env):
     import params
@@ -161,7 +161,7 @@ class DataNodeDefault(DataNode):
 class DataNodeWindows(DataNode):
   def install(self, env):
     import install_params
-    self.install_packages(env, install_params.exclude_packages)
+    self.install_packages(env)
 
 if __name__ == "__main__":
   DataNode().execute()

+ 2 - 2
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py

@@ -31,8 +31,8 @@ class HdfsClient(Script):
 
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
     env.set_params(params)
+    self.install_packages(env)
     self.configure(env)
 
   def configure(self, env):
@@ -113,7 +113,7 @@ class HdfsClientDefault(HdfsClient):
 class HdfsClientWindows(HdfsClient):
   def install(self, env):
     import install_params
-    self.install_packages(env, install_params.exclude_packages)
+    self.install_packages(env)
     self.configure(env)
 
 if __name__ == "__main__":

+ 2 - 2
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py

@@ -36,8 +36,8 @@ from ambari_commons import OSConst
 class JournalNode(Script):
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
     env.set_params(params)
+    self.install_packages(env)  
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class JournalNodeDefault(JournalNode):
@@ -164,7 +164,7 @@ class JournalNodeDefault(JournalNode):
 class JournalNodeWindows(JournalNode):
   def install(self, env):
     import install_params
-    self.install_packages(env, install_params.exclude_packages)
+    self.install_packages(env)
 
   def start(self, env):
     import params

+ 2 - 2
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py

@@ -82,8 +82,8 @@ class NameNode(Script):
 
   def install(self, env):
     import params
-    self.install_packages(env, params.exclude_packages)
     env.set_params(params)
+    self.install_packages(env)
     #TODO we need this for HA because of manual steps
     self.configure(env)
 
@@ -347,7 +347,7 @@ class NameNodeDefault(NameNode):
 class NameNodeWindows(NameNode):
   def install(self, env):
     import install_params
-    self.install_packages(env, install_params.exclude_packages)
+    self.install_packages(env)
     #TODO we need this for HA because of manual steps
     self.configure(env)
 

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py

@@ -39,7 +39,7 @@ class NFSGateway(Script):
 
     env.set_params(params)
 
-    self.install_packages(env, params.exclude_packages)
+    self.install_packages(env)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 0 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -341,10 +341,6 @@ HdfsResource = functools.partial(
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
 lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
 lzo_packages = get_lzo_packages(stack_version_unformatted)
-
-exclude_packages = []
-if not lzo_enabled:
-  exclude_packages += lzo_packages
   
 name_node_params = default("/commandParams/namenode", None)
 

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py

@@ -36,7 +36,7 @@ class SNameNode(Script):
   def install(self, env):
     import params
     env.set_params(params)
-    self.install_packages(env, params.exclude_packages)
+    self.install_packages(env)
 
   def configure(self, env):
     import params

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py

@@ -39,7 +39,7 @@ class ZkfcSlave(Script):
   def install(self, env):
     import params
     env.set_params(params)
-    self.install_packages(env, params.exclude_packages)
+    self.install_packages(env)
 
   def configure(self, env):
     import params

+ 4 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml

@@ -255,6 +255,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
           </packages>
         </osSpecific>
@@ -273,6 +274,7 @@
             <package>
               <name>mysql-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>
@@ -282,10 +284,12 @@
             <package>
               <name>mysql-community-release</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
             <package>
               <name>mysql-community-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py

@@ -31,7 +31,7 @@ from resource_management.libraries.script.script import Script
 class HCatClient(Script):
   def install(self, env):
     import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.install_packages(env)
     self.configure(env)
 
   def configure(self, env):

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py

@@ -28,7 +28,7 @@ from ambari_commons import OSConst
 class HiveClient(Script):
   def install(self, env):
     import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.install_packages(env)
     self.configure(env)
 
   def status(self, env):

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -47,7 +47,7 @@ LEGACY_HIVE_SERVER_CONF = "/etc/hive/conf.server"
 class HiveMetastore(Script):
   def install(self, env):
     import params
-    self.install_packages(env, exclude_packages = params.hive_exclude_packages)
+    self.install_packages(env)
 
 
   def start(self, env, upgrade_type=None):

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -47,7 +47,7 @@ from hive_service import hive_service
 class HiveServer(Script):
   def install(self, env):
     import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.install_packages(env)
 
   def configure(self, env):
     import params

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_server.py

@@ -30,7 +30,7 @@ from mysql_utils import mysql_configure
 class MysqlServer(Script):
   def install(self, env):
     import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.install_packages(env)
     self.configure(env)
 
   def clean(self, env):

+ 1 - 17
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -384,19 +384,6 @@ hive_authorization_enabled = config['configurations']['hive-site']['hive.securit
 
 mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
 hive_use_existing_db = hive_database.startswith('Existing')
-hive_exclude_packages = []
-
-# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
-# trying to install mysql-connector-java upon them can cause packages to conflict.
-if hive_use_existing_db:
-  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server',
-                           'mysql-community-release', 'mysql-community-server']
-else:
-  if 'role' in config and config['role'] != "MYSQL_SERVER":
-    hive_exclude_packages = ['mysql', 'mysql-server', 'mysql-community-release',
-                             'mysql-community-server']
-  if os.path.exists(mysql_jdbc_driver_jar):
-    hive_exclude_packages.append('mysql-connector-java')
 
 
 hive_site_config = dict(config['configurations']['hive-site'])
@@ -410,10 +397,7 @@ classpath_addition = ""
 atlas_plugin_package = "atlas-metadata*-hive-plugin"
 atlas_ubuntu_plugin_package = "atlas-metadata.*-hive-plugin"
 
-if not has_atlas:
-  hive_exclude_packages.append(atlas_plugin_package)
-  hive_exclude_packages.append(atlas_ubuntu_plugin_package)
-else:
+if has_atlas:
   # client.properties
   atlas_client_props = {}
   auth_enabled = config['configurations']['application-properties'].get(

+ 0 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py

@@ -61,8 +61,6 @@ hive_metastore_db_type = config['configurations']['hive-env']['hive_database_typ
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 
-hive_exclude_packages = []
-
 hive_execution_engine = config["configurations"]["hive-site"]["hive.execution.engine"]
 
 ######## Metastore Schema

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -33,7 +33,7 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 class WebHCatServer(Script):
   def install(self, env):
     import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.install_packages(env)
 
   def start(self, env, upgrade_type=None):
     import params

+ 6 - 0
ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml

@@ -78,10 +78,12 @@
             <package>
               <name>krb5-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_kerberos_server</condition>
             </package>
             <package>
               <name>krb5-libs</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_kerberos_server</condition>
             </package>
             <package>
               <name>krb5-workstation</name>
@@ -96,10 +98,12 @@
             <package>
               <name>krb5-kdc</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_kerberos_server</condition>
             </package>
             <package>
               <name>krb5-admin-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_kerberos_server</condition>
             </package>
             <package>
               <name>krb5-user</name>
@@ -118,6 +122,7 @@
             <package>
               <name>krb5</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_kerberos_server</condition>
             </package>
             <package>
               <name>krb5-client</name>
@@ -126,6 +131,7 @@
             <package>
               <name>krb5-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_kerberos_server</condition>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_client.py

@@ -24,7 +24,7 @@ class KerberosClient(KerberosScript):
   def install(self, env):
     install_packages = default('/configurations/kerberos-env/install_packages', "true")
     if install_packages:
-      self.install_packages(env, ['krb5-server', 'krb5-libs', 'krb5-auth-dialog', 'krb5', 'krb5-kdc', 'krb5-admin-server'])
+      self.install_packages(env)
     else:
       print "Kerberos client packages are not being installed, manual installation is required."
 

+ 1 - 0
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/metainfo.xml

@@ -72,6 +72,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
           </packages>
         </osSpecific>

+ 4 - 13
ambari-server/src/main/resources/custom_actions/scripts/install_packages.py

@@ -28,6 +28,7 @@ import os.path
 import ambari_simplejson as json  # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 
 from resource_management import *
+import resource_management
 from resource_management.libraries.functions.list_ambari_managed_repos import list_ambari_managed_repos
 from ambari_commons.os_check import OSCheck, OSConst
 from resource_management.libraries.functions.packages_analyzer import allInstalledPackages
@@ -416,25 +417,15 @@ class InstallPackages(Script):
   def filter_package_list(self, package_list):
     """
    Note that we have a skipUpgrade option in metainfo.xml to filter packages,
+    as well as a condition option to filter them conditionally,
    so use this method only if, for some reason, the metainfo option cannot be used.
-    
-    Here we filter packages that are managed with custom logic in package
-    scripts. Usually this packages come from system repositories, and either
-     are not available when we restrict repository list, or should not be
-    installed on host at all.
+  
     :param package_list: original list
     :return: filtered package_list
     """
     filtered_package_list = []
     for package in package_list:
-      skip_package = False
-      
-      # skip upgrade for hadooplzo* versioned package, only if lzo is disabled 
-      io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-      if not io_compression_codecs or "com.hadoop.compression.lzo" not in io_compression_codecs:
-        skip_package = package['name'].startswith('hadooplzo')
-
-      if not skip_package:
+      if Script.check_package_condition(package):
         filtered_package_list.append(package)
     return filtered_package_list
 

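A standalone illustration of the simplified filtering flow above; the stub stands in for Script.check_package_condition and the package entries are made up for the example:

def check_package_condition(package):  # stub for Script.check_package_condition
  return package['condition'] == ''    # pretend every named condition evaluates to False

package_list = [{"name": "hadoop_2_2_*", "condition": ""},
                {"name": "hadooplzo_2_2_*", "condition": "should_install_lzo"}]
filtered_package_list = [p for p in package_list if check_package_condition(p)]
print(filtered_package_list)  # only hadoop_2_2_* survives while LZO is disabled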
+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml

@@ -58,6 +58,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
           </packages>
         </osSpecific>
@@ -76,10 +77,12 @@
             <package>
               <name>mysql-community-release</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
             <package>
               <name>mysql-community-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>
@@ -89,6 +92,7 @@
             <package>
               <name>mysql-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml

@@ -32,6 +32,7 @@
             </package>
             <package>
               <name>phoenix_${stack_version}</name>
+              <condition>should_install_phoenix</condition>
             </package>
           </packages>
         </osSpecific>
@@ -43,6 +44,7 @@
             </package>
             <package>
               <name>phoenix-${stack_version}</name>
+              <condition>should_install_phoenix</condition>
             </package>
           </packages>
         </osSpecific>

+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml

@@ -39,9 +39,11 @@
             <package>
               <name>lzo</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadooplzo_${stack_version}</name>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadoop_${stack_version}-libhdfs</name>
@@ -78,10 +80,12 @@
             </package>
             <package>
               <name>hadooplzo-${stack_version}</name>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>liblzo2-2</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>libhdfs0-${stack_version}</name>

+ 5 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml

@@ -63,6 +63,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
           </packages>
         </osSpecific>
@@ -95,10 +96,12 @@
             <package>
               <name>mysql-community-release</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
             <package>
               <name>mysql-community-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>
@@ -122,6 +125,7 @@
             <package>
               <name>mysql-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>
@@ -131,6 +135,7 @@
             <package>
               <name>mysql-client</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml

@@ -67,6 +67,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
             <package>
               <name>extjs</name>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml

@@ -28,6 +28,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
           </packages>
         </osSpecific>

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml

@@ -70,9 +70,11 @@
             <package>
               <name>lzo</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadooplzo_${stack_version}</name>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>hadoop_${stack_version}-libhdfs</name>
@@ -109,6 +111,7 @@
             </package>
             <package>
               <name>hadooplzo-${stack_version}</name>
+              <condition>should_install_lzo</condition>
             </package>
             <package>
               <name>libhdfs0-${stack_version}</name>

+ 7 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml

@@ -29,6 +29,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
           </packages>
         </osSpecific>
@@ -46,6 +47,7 @@
             </package>
             <package>
               <name>atlas-metadata_${stack_version}-hive-plugin</name>
+              <condition>should_install_hive_atlas</condition>
             </package>
           </packages>
         </osSpecific>
@@ -55,6 +57,7 @@
             <package>
               <name>mysql</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>
@@ -64,10 +67,12 @@
             <package>
               <name>mysql-community-release</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
             <package>
               <name>mysql-community-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>
@@ -85,6 +90,7 @@
             </package>
             <package>
               <name>atlas-metadata-${stack_version}-hive-plugin</name>
+              <condition>should_install_hive_atlas</condition>
             </package>
           </packages>
         </osSpecific>
@@ -94,6 +100,7 @@
             <package>
               <name>mysql-server</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysql</condition>
             </package>
           </packages>
         </osSpecific>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml

@@ -38,6 +38,7 @@
             <package>
               <name>mysql-connector-java</name>
               <skipUpgrade>true</skipUpgrade>
+              <condition>should_install_mysl_connector</condition>
             </package>
             <package>
               <name>extjs</name>

+ 2 - 2
ambari-server/src/test/python/custom_actions/configs/install_packages_config.json

@@ -26,7 +26,7 @@
         "stack_id": "HDP-2.2",
         "repository_version": "2.2.0.1-885",
         "base_urls": "[{\"name\":\"HDP-UTILS\",\"baseUrl\":\"http://repo1/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-UTILS-1.1.0.20\"},{\"name\":\"HDP\",\"baseUrl\":\"http://repo1/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-2.2\"}]",
-        "package_list": "[{\"name\":\"hadoop_${stack_version}\"},{\"name\":\"snappy\"},{\"name\":\"snappy-devel\"},{\"name\":\"lzo\"},{\"name\":\"hadooplzo_${stack_version}\"},{\"name\":\"hadoop_${stack_version}-libhdfs\"},{\"name\":\"ambari-log4j\"}]"
+        "package_list": "[{\"name\":\"hadoop_${stack_version}\", \"condition\": \"\"},{\"name\":\"snappy\", \"condition\": \"\"},{\"name\":\"snappy-devel\", \"condition\": \"\"},{\"name\":\"lzo\", \"condition\": \"\"},{\"name\":\"hadooplzo_${stack_version}\", \"condition\": \"\"},{\"name\":\"hadoop_${stack_version}-libhdfs\", \"condition\": \"\"},{\"name\":\"ambari-log4j\", \"condition\": \"\"}]"
     }, 
     "serviceName": "null", 
     "role": "install_packages", 
@@ -44,7 +44,7 @@
         "script_type": "PYTHON",
         "repository_version": "2.2.0.1-885",
         "base_urls": "[{\"name\":\"HDP-UTILS\",\"baseUrl\":\"http://repo1/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-UTILS-1.1.0.20\"},{\"name\":\"HDP\",\"baseUrl\":\"http://repo1/HDP/centos5/2.x/updates/2.2.0.0\",\"repositoryId\":\"HDP-2.2\"}]",
-        "package_list": "[{\"name\":\"hadoop_${stack_version}\"},{\"name\":\"snappy\"},{\"name\":\"snappy-devel\"},{\"name\":\"lzo\"},{\"name\":\"hadooplzo_${stack_version}\"},{\"name\":\"hadoop_${stack_version}-libhdfs\"},{\"name\":\"ambari-log4j\"}]",
+        "package_list": "[{\"name\":\"hadoop_${stack_version}\", \"condition\": \"\"},{\"name\":\"snappy\", \"condition\": \"\"},{\"name\":\"snappy-devel\", \"condition\": \"\"},{\"name\":\"lzo\", \"condition\": \"\"},{\"name\":\"hadooplzo_${stack_version}\", \"condition\": \"\"},{\"name\":\"hadoop_${stack_version}-libhdfs\", \"condition\": \"\"},{\"name\":\"ambari-log4j\", \"condition\": \"\"}]",
         "script": "install_packages.py"
     }, 
     "commandId": "14-1", 

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/hbase_no_phx.json

@@ -16,7 +16,7 @@
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "java_version": "8",
         "host_sys_prepped": "false",
-        "package_list": "[{\"name\":\"hbase_2_3_*\",\"skipUpgrade\":false},{\"name\":\"phoenix_2_3_*\",\"skipUpgrade\":false}]",
+        "package_list": "[{\"name\":\"hbase_2_3_*\",\"condition\":\"\",\"skipUpgrade\":false},{\"name\":\"phoenix_*\",\"condition\":\"{{has_phoenix or phoenix_enabled}}\",\"skipUpgrade\":false}]",
         "db_name": "ambari",
         "group_list": "[\"hadoop\",\"nobody\",\"users\"]",
         "user_list": "[\"hive\",\"oozie\",\"nobody\",\"ambari-qa\",\"flume\",\"hdfs\",\"storm\",\"mapred\",\"hbase\",\"tez\",\"zookeeper\",\"falcon\",\"sqoop\",\"yarn\",\"hcat\"]"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/hbase_with_phx.json

@@ -16,7 +16,7 @@
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "java_version": "8",
         "host_sys_prepped": "false",
-        "package_list": "[{\"name\":\"hbase_2_3_*\",\"skipUpgrade\":false},{\"name\":\"phoenix_2_3_*\",\"skipUpgrade\":false}]",
+        "package_list": "[{\"name\":\"hbase_2_3_*\",\"condition\":\"\",\"skipUpgrade\":false},{\"name\":\"phoenix_2_3_*\",\"condition\":\"{{has_phoenix or phoenix_enabled}}\",\"skipUpgrade\":false}]",
         "db_name": "ambari",
         "group_list": "[\"hadoop\",\"nobody\",\"users\"]",
         "user_list": "[\"hive\",\"oozie\",\"nobody\",\"ambari-qa\",\"flume\",\"hdfs\",\"storm\",\"mapred\",\"hbase\",\"tez\",\"zookeeper\",\"falcon\",\"sqoop\",\"yarn\",\"hcat\"]"

+ 1 - 1
ambari-server/src/test/python/stacks/2.3/configs/pxf_default.json

@@ -14,7 +14,7 @@
         "ambari_db_rca_username": "mapred",
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari",
-        "package_list": "[{\"name\":\"pxf-service\",\"skipUpgrade\":false},{\"name\":\"apache-tomcat\",\"skipUpgrade\":false},{\"name\":\"pxf-hive\",\"skipUpgrade\":false},{\"name\":\"pxf-hdfs\",\"skipUpgrade\":false},{\"name\":\"pxf-hbase\",\"skipUpgrade\":false}]"
+        "package_list": "[{\"name\":\"pxf-service\",\"skipUpgrade\":false, \"condition\": \"\"},{\"name\":\"apache-tomcat\",\"skipUpgrade\":false, \"condition\": \"\"},{\"name\":\"pxf-hive\",\"skipUpgrade\":false, \"condition\": \"\"},{\"name\":\"pxf-hdfs\",\"skipUpgrade\":false, \"condition\": \"\"},{\"name\":\"pxf-hbase\",\"skipUpgrade\":false, \"condition\": \"\"}]"
     },
     "commandType": "EXECUTION_COMMAND",
     "roleParams": {},