Browse Source

Revert "AMBARI-11579. Add installation of atlas hive hook (Jon Maron via smohanty)"

This reverts commit aa9aaf6a15ebdb65fca9180c1f5fe408c059a4bb.
Sumit Mohanty, 10 years ago
parent
commit
4e1e89d104

+ 0 - 39
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/atlas_plugin_utils.py

@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management import Script
-from resource_management.core import logger
-
-
-def configure_for_plugin(command_data_file):
-    import json
-    savedConfig = Script.get_config()
-    if savedConfig['hostLevelParams'].get('custom_command', '') == 'RESTART':
-        try:
-            with open(command_data_file) as f:
-                pass
-                Script.config = json.load(f)
-                Script.config['hostLevelParams']['package_list'] = \
-                    "[{\"name\":\"atlas-metadata*-hive-plugin\"}]"
-
-        except IOError:
-            logger.exception("Can not read json file with command parameters: ")
-
-    return savedConfig
-

+ 0 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py

@@ -23,15 +23,12 @@ from resource_management import *
 from hcat import hcat
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
-from atlas_plugin_utils import configure_for_plugin
 
 
 class HCatClient(Script):
   def install(self, env):
     import params
-    savedConfig = configure_for_plugin(self.command_data_file)
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    Script.config = savedConfig
     self.configure(env)
 
   def configure(self, env):

+ 2 - 10
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -21,7 +21,6 @@ limitations under the License.
 import os
 import glob
 from urlparse import urlparse
-from resource_management import PropertiesFile
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
@@ -198,19 +197,12 @@ def hive(name=None):
 
   XmlConfig("hive-site.xml",
             conf_dir=params.hive_config_dir,
-            configurations=params.hive_site_config,
+            configurations=params.config['configurations']['hive-site'],
             configuration_attributes=params.config['configuration_attributes']['hive-site'],
             owner=params.hive_user,
             group=params.user_group,
             mode=0644)
-
-  if params.atlas_hosts:
-      PropertiesFile(format('{hive_config_dir}/client.properties'),
-                     properties = params.atlas_client_props,
-                     owner = params.hive_user,
-                     group = params.user_group,
-                     mode = 0644)
-
+  
   if params.hive_specific_configs_supported and name == 'hiveserver2':
     XmlConfig("hiveserver2-site.xml",
               conf_dir=params.hive_server_conf_dir,

+ 1 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py

@@ -24,14 +24,12 @@ from resource_management.libraries.functions import hdp_select
 from hive import hive
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
-from atlas_plugin_utils import configure_for_plugin
+
 
 class HiveClient(Script):
   def install(self, env):
     import params
-    savedConfig = configure_for_plugin(self.command_data_file)
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    Script.config = savedConfig
     self.configure(env)
 
   def status(self, env):

+ 0 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -35,7 +35,6 @@ from hive import hive
 from hive_service import hive_service
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
-from atlas_plugin_utils import configure_for_plugin
 
 # the legacy conf.server location in HDP 2.2
 LEGACY_HIVE_SERVER_CONF = "/etc/hive/conf.server"
@@ -59,9 +58,6 @@ class HiveMetastore(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    savedConfig = configure_for_plugin(self.command_data_file)
-    self.install_packages(env, exclude_packages = params.hive_exclude_packages)
-    Script.config = savedConfig
     hive(name = 'metastore')
 
 

+ 0 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -36,7 +36,6 @@ if OSCheck.is_windows_family():
   from resource_management.libraries.functions.windows_service_utils import check_windows_service_status
 from setup_ranger_hive import setup_ranger_hive
 from ambari_commons.os_family_impl import OsFamilyImpl
-from atlas_plugin_utils import configure_for_plugin
 from resource_management.core.logger import Logger
 
 import hive_server_upgrade
@@ -52,9 +51,6 @@ class HiveServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    savedConfig = configure_for_plugin(self.command_data_file)
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    Script.config = savedConfig
     hive(name='hiveserver2')
 
 

+ 0 - 37
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -347,43 +347,6 @@ else:
  if os.path.exists(mysql_jdbc_driver_jar):
    hive_exclude_packages.append('mysql-connector-java')
 
-
-hive_site_config = dict(config['configurations']['hive-site'])
-########################################################
-############# Atlas related params #####################
-########################################################
-
-atlas_hosts = default('/clusterHostInfo/atlas_server_hosts', [])
-classpath_addition = ""
-if not atlas_hosts:
-  hive_exclude_packages.append('atlas-metadata*-hive-plugin')
-else:
-  # hive-site
-  hive_site_config['hive.cluster.name'] = config['clusterName']
-  atlas_config = config['configurations']['application-properties']
-  metadata_port = config['configurations']['metadata-env']['metadata_port']
-  metadata_host = atlas_hosts[0]
-  tls_enabled = config['configurations']['application-properties']['enableTLS']
-  if tls_enabled:
-    scheme = "https"
-  else:
-    scheme = "http"
-  hive_site_config['hive.hook.dgi.url'] = format('{scheme}://{metadata_host}:{metadata_port}')
-
-  if not 'hive.exec.post.hooks' in hive_site_config:
-    hive_site_config['hive.exec.post.hooks'] = 'org.apache.hadoop.metadata.hive.hook.HiveHook'
-  else:
-    current_hook = hive_site_config['hive.exec.post.hooks']
-    hive_site_config['hive.exec.post.hooks'] =  format('{current_hook}, org.apache.hadoop.metadata.hive.hook.HiveHook')
-
-  # client.properties
-  atlas_client_props = {}
-  auth_enabled = config['configurations']['application-properties'].get(
-    'metadata.http.authentication.enabled', False)
-  atlas_client_props['metadata.http.authentication.enabled'] = auth_enabled
-  if auth_enabled:
-    atlas_client_props['metadata.http.authentication.type'] = config['configurations']['application-properties'].get('metadata.http.authentication.type', 'simple')
-
 ########################################################
 ########### WebHCat related params #####################
 ########################################################

+ 0 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -28,7 +28,6 @@ from webhcat import webhcat
 from webhcat_service import webhcat_service
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
-from atlas_plugin_utils import configure_for_plugin
 
 
 class WebHCatServer(Script):
@@ -50,9 +49,6 @@ class WebHCatServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    savedConfig = configure_for_plugin(self.command_data_file)
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    Script.config = savedConfig
     webhcat()
 
 

+ 0 - 3
ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml

@@ -29,9 +29,6 @@
             <package>
               <name>mysql-connector-java</name>
             </package>
-            <package>
-              <name>atlas-metadata*-hive-plugin</name>
-            </package>
           </packages>
         </osSpecific>
         <osSpecific>

+ 0 - 1
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -31,7 +31,6 @@ from resource_management.libraries import functions
 
 @patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.0.0.0-1234"))
 @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
-@patch("atlas_plugin_utils.configure_for_plugin", new=MagicMock())
 class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"