AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services

Artem Baranchuk committed 10 years ago
parent commit 7a68f8e49b
33 changed files with 700 additions and 3876 deletions
  1. +15 -0    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
  2. +14 -4    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
  3. +11 -0    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
  4. +44 -3    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
  5. +23 -15   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
  6. +20 -21   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
  7. +26 -9    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  8. +16 -0    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
  9. +4 -393   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
  10. +414 -0  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
  11. +4 -5    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
  12. +21 -1   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
  13. +30 -25  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
  14. +11 -1   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
  15. +22 -15  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
  16. +10 -0   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
  17. +11 -0   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
  18. +0 -777  ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.mysql.sql
  19. +0 -717  ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.oracle.sql
  20. +0 -1405 ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.postgres.sql
  21. +4 -0    ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/metainfo.xml
  22. +0 -40   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hcat_client.py
  23. +0 -25   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hcat_service_check.py
  24. +0 -61   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive.py
  25. +0 -41   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive_client.py
  26. +0 -53   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive_metastore.py
  27. +0 -52   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive_server.py
  28. +0 -46   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/mysql_server.py
  29. +0 -39   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/service_check.py
  30. +0 -23   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/service_mapping.py
  31. +0 -30   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/webhcat.py
  32. +0 -48   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/webhcat_server.py
  33. +0 -27   ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/webhcat_service_check.py

+ 15 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py

@@ -20,8 +20,23 @@ limitations under the License.
 
 from resource_management import *
 import sys
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hcat():
+  import params
+
+  XmlConfig("hive-site.xml",
+            conf_dir = params.hive_conf_dir,
+            configurations = params.config['configurations']['hive-site'],
+            owner=params.hive_user,
+            configuration_attributes=params.config['configuration_attributes']['hive-site']
+  )
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hcat():
   import params
 

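The hunk above shows the pattern this whole commit relies on: two functions share the name hcat(), and OsFamilyFuncImpl registers each one as the implementation for a particular OS family, so call sites keep calling hcat() and the variant matching the host OS runs. Below is a minimal, self-contained sketch of how such a dispatcher could work; the registry and OS detection are illustrative assumptions, not the actual ambari_commons code.

    import platform

    WINSRV_FAMILY = "winsrv"   # stand-ins for the OSConst / OsFamilyImpl constants
    DEFAULT = "default"

    _impls = {}  # function name -> {os family: implementation}

    def os_family_func_impl(os_family):
        """Register the decorated function as its name's impl for one OS family."""
        def decorator(func):
            variants = _impls.setdefault(func.__name__, {})
            variants[os_family] = func
            def dispatcher(*args, **kwargs):
                family = WINSRV_FAMILY if platform.system() == "Windows" else DEFAULT
                # Fall back to the default implementation when no exact match exists.
                return variants.get(family, variants[DEFAULT])(*args, **kwargs)
            return dispatcher
        return decorator

    @os_family_func_impl(os_family=WINSRV_FAMILY)
    def hcat():
        print("Windows branch: write hive-site.xml via XmlConfig")

    @os_family_func_impl(os_family=DEFAULT)
    def hcat():
        print("default branch: the existing Linux logic")

    hcat()  # runs whichever variant matches the current OS
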
+ 14 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py

@@ -21,12 +21,11 @@ limitations under the License.
 import sys
 from resource_management import *
 from hcat import hcat
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class HCatClient(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
 
+class HCatClient(Script):
   def install(self, env):
     import params
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
@@ -41,5 +40,16 @@ class HCatClient(Script):
     raise ClientComponentHasNoStatus()
 
 
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HCatClientWindows(HCatClient):
+  pass
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HCatClientDefault(HCatClient):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-client"}
+
+
 if __name__ == "__main__":
   HCatClient().execute()

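The same dispatch works at class level: HCatClient stays a neutral base, and @OsFamilyImpl tags one subclass per OS family, which is why get_stack_to_component() now lives only in the default (Linux) subclass. A rough, self-contained sketch of that mechanism, again with assumed names rather than the real ambari_commons internals:

    import platform

    WINSRV_FAMILY, DEFAULT = "winsrv", "default"
    _class_impls = {}  # base class -> {os family: subclass}

    def os_family_impl(os_family):
        def decorator(cls):
            # Register the subclass under its immediate base for this OS family.
            _class_impls.setdefault(cls.__bases__[0], {})[os_family] = cls
            return cls
        return decorator

    def resolve(base):
        """Pick the subclass registered for the current OS, falling back to default."""
        family = WINSRV_FAMILY if platform.system() == "Windows" else DEFAULT
        variants = _class_impls.get(base, {})
        return variants.get(family, variants.get(DEFAULT, base))

    class HCatClient:
        def install(self):
            print("install packages, then configure")

    @os_family_impl(os_family=WINSRV_FAMILY)
    class HCatClientWindows(HCatClient):
        pass  # nothing Windows-specific beyond the shared base

    @os_family_impl(os_family=DEFAULT)
    class HCatClientDefault(HCatClient):
        def get_stack_to_component(self):
            return {"HDP": "hadoop-client"}

    resolve(HCatClient)().install()  # instantiates the OS-appropriate subclass
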
+ 11 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py

@@ -20,7 +20,18 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.libraries.functions import get_unique_id_and_date
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hcat_service_check():
+  import params
+  smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+  service = "HCatalog"
+  Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hcat_service_check():
     import params
     unique = get_unique_id_and_date()

+ 44 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -19,10 +19,54 @@ limitations under the License.
 """
 
 from resource_management import *
+from resource_management.libraries import functions
 import sys
 import os
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hive(name=None):
+  import params
+  XmlConfig("hive-site.xml",
+            conf_dir = params.hive_conf_dir,
+            configurations = params.config['configurations']['hive-site'],
+            owner=params.hive_user,
+            configuration_attributes=params.config['configuration_attributes']['hive-site']
+  )
+  if name in ["hiveserver2","metastore"]:
+    Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"), logoutput=True, user=params.hadoop_user)
+
+  if name == 'metastore':
+    if params.init_metastore_schema:
+      check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
+                                        '-dbType {hive_metastore_db_type} '
+                                        '-userName {hive_metastore_user_name} '
+                                        '-passWord {hive_metastore_user_passwd!p}'
+                                        '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"', #cmd "feature", propagate the process exit code manually
+                                        hive_bin=params.hive_bin,
+                                        hive_metastore_db_type=params.hive_metastore_db_type,
+                                        hive_metastore_user_name=params.hive_metastore_user_name,
+                                        hive_metastore_user_passwd=params.hive_metastore_user_passwd)
+      try:
+        Execute(check_schema_created_cmd)
+      except Fail:
+        create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
+                                   '-dbType {hive_metastore_db_type} '
+                                   '-userName {hive_metastore_user_name} '
+                                   '-passWord {hive_metastore_user_passwd!p}',
+                                   hive_bin=params.hive_bin,
+                                   hive_metastore_db_type=params.hive_metastore_db_type,
+                                   hive_metastore_user_name=params.hive_metastore_user_name,
+                                   hive_metastore_user_passwd=params.hive_metastore_user_passwd)
+        Execute(create_schema_cmd,
+                user = params.hive_user,
+                logoutput=True
+        )
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hive(name=None):
   import params
 
@@ -179,7 +223,6 @@ def fill_conf_dir(component_conf_dir):
          content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
     )
 
-
 def crt_directory(name):
   import params
 
@@ -190,7 +233,6 @@ def crt_directory(name):
             group=params.user_group,
             mode=0755)
 
-
 def crt_file(name):
   import params
 
@@ -199,7 +241,6 @@ def crt_file(name):
        group=params.user_group
   )
 
-
 def jdbc_connector():
   import params
 

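The Windows metastore branch above is a check-then-init sequence: schematool -info exits non-zero when the schema does not exist yet, the first Execute raises Fail, and only then is -initSchema run, so repeated starts stay idempotent (the !p conversion in format() flags the value as a password so it can be masked in logged command text). A stripped-down sketch of the same flow, with subprocess standing in for Execute and illustrative command arguments:

    import subprocess

    def ensure_metastore_schema(db_type, user, password):
        """Initialize the Hive metastore schema only if it is missing (a sketch)."""
        base = ["hive", "--service", "schematool",
                "-dbType", db_type, "-userName", user, "-passWord", password]
        # -info fails (non-zero exit) while no schema exists.
        if subprocess.call(base + ["-info"]) != 0:
            subprocess.check_call(base + ["-initSchema"])

    # ensure_metastore_schema("mssql", "hive", "secret")  # illustrative values
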
+ 23 - 15
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py

@@ -19,34 +19,42 @@ limitations under the License.
 """
 import sys
 from resource_management import *
-
 from hive import hive
+from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons import OSConst
 
-class HiveClient(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
-  def pre_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      Execute(format("hdp-select set hadoop-client {version}"))
 
+class HiveClient(Script):
   def install(self, env):
     import params
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
     self.configure(env)
 
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
   def configure(self, env):
     import params
     env.set_params(params)
-
     hive(name='client')
 
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveClientWindows(HiveClient):
+  pass
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveClientDefault(HiveClient):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-client"}
+
+  def pre_rolling_restart(self, env):
+    import params
+    env.set_params(params)
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-client {version}"))
+
 
 if __name__ == "__main__":
   HiveClient().execute()

+ 20 - 21
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -25,51 +25,51 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from hive import hive
 from hive_service import hive_service
+from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons import OSConst
 
 
 class HiveMetastore(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hive-metastore"}
-
   def install(self, env):
     import params
-
     self.install_packages(env, exclude_packages = params.hive_exclude_packages)
 
-
-  def configure(self, env):
+  def start(self, env, rolling_restart=False):
     import params
-
     env.set_params(params)
+    self.configure(env)  # FOR SECURITY
+    hive_service('metastore', action='start')
 
-    hive(name = 'metastore')
-
-
-  def start(self, env, rolling_restart = False):
+  def stop(self, env, rolling_restart=False):
     import params
-
     env.set_params(params)
-    self.configure(env)  # FOR SECURITY
-    hive_service('metastore', action = 'start')
-
+    hive_service('metastore', action='stop')
 
-  def stop(self, env, rolling_restart = False):
+  def configure(self, env):
     import params
-
     env.set_params(params)
-    hive_service('metastore', action = 'stop' )
+    hive(name = 'metastore')
 
 
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveMetastoreWindows(HiveMetastore):
   def status(self, env):
     import status_params
+    check_windows_service_status(status_params.hive_metastore_win_service_name)
+
 
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveMetastoreDefault(HiveMetastore):
+  def get_stack_to_component(self):
+    return {"HDP": "hive-metastore"}
+
+  def status(self, env):
+    import status_params
     env.set_params(status_params)
     pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
     # Recursively check all existing gmetad pid files
     check_process_status(pid_file)
 
-
   def pre_rolling_restart(self, env):
     Logger.info("Executing Metastore Rolling Upgrade pre-restart")
     import params
@@ -78,7 +78,6 @@ class HiveMetastore(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hive-metastore {version}"))
 
-
   def security_status(self, env):
     import status_params
     env.set_params(status_params)

+ 26 - 9
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -27,23 +27,44 @@ from resource_management.libraries.functions.security_commons import build_expec
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
 from setup_ranger_hive import setup_ranger_hive
+from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons import OSConst
 
-class HiveServer(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hive-server2"}
 
+class HiveServer(Script):
   def install(self, env):
     import params
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
 
-
   def configure(self, env):
     import params
     env.set_params(params)
     hive(name='hiveserver2')
 
 
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveServerWindows(HiveServer):
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    hive_service('hiveserver2', action='start')
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+    hive_service('hiveserver2', action='stop')
+
+  def status(self, env):
+    import status_params
+    check_windows_service_status(status_params.hive_server_win_service_name)
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveServerDefault(HiveServer):
+  def get_stack_to_component(self):
+    return {"HDP": "hive-server2"}
+
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
@@ -55,7 +76,6 @@ class HiveServer(Script):
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
 
-
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
@@ -65,7 +85,6 @@ class HiveServer(Script):
     else:
       hive_service( 'hiveserver2', action = 'stop' )
 
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -74,7 +93,6 @@ class HiveServer(Script):
     # Recursively check all existing gmetad pid files
     check_process_status(pid_file)
 
-
   def pre_rolling_restart(self, env):
     Logger.info("Executing HiveServer2 Rolling Upgrade pre-restart")
     import params
@@ -85,7 +103,6 @@ class HiveServer(Script):
       copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
       copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
 
-
   def security_status(self, env):
     import status_params
     env.set_params(status_params)

+ 16 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py

@@ -23,7 +23,23 @@ import sys
 import os
 import time
 from resource_management.core import shell
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hive_service(name, action='start', rolling_restart=False):
+  import params
+  if name == 'metastore':
+    if action == 'start' or action == 'stop':
+      Service(params.hive_metastore_win_service_name, action=action)
+
+  if name == 'hiveserver2':
+    if action == 'start' or action == 'stop':
+      Service(params.hive_server_win_service_name, action=action)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hive_service(name, action='start', rolling_restart=False):
 
   import params

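On Windows both daemons run as registered services, so hive_service() collapses to a Service(win_service_name, action=...) call per component, as in the hunk above. A rough equivalent using net start/stop, with placeholder service names standing in for the real status_params values:

    import subprocess

    # Placeholder Windows service names; the real ones come from status_params.
    WIN_SERVICE_NAMES = {"metastore": "metastore", "hiveserver2": "hiveserver2"}

    def hive_service(name, action="start"):
        if action in ("start", "stop"):
            subprocess.check_call(["net", action, WIN_SERVICE_NAMES[name]])
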
+ 4 - 393
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py

@@ -17,398 +17,9 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from ambari_commons import OSCheck
 
-from ambari_commons.constants import AMBARI_SUDO_BINARY
-from ambari_commons.os_check import OSCheck
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-import status_params
-import os
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-sudo = AMBARI_SUDO_BINARY
-
-stack_name = default("/hostLevelParams/stack_name", None)
-
-# node hostname
-hostname = config["hostname"]
-
-# This is expected to be of the form #.#.#.#
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
-
-# Hadoop params
-# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
-  # start out with client libraries
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-  hadoop_home = '/usr/hdp/current/hadoop-client'
-  hive_bin = '/usr/hdp/current/hive-client/bin'
-  hive_lib = '/usr/hdp/current/hive-client/lib'
-
-  # if this is a server action, then use the server binaries; smoke tests
-  # use the client binaries
-  command_role = default("/role", "")
-  server_role_dir_mapping = { 'HIVE_SERVER' : 'hive-server2',
-    'HIVE_METASTORE' : 'hive-metastore' }
-
-  if command_role in server_role_dir_mapping:
-    hive_server_root = server_role_dir_mapping[command_role]
-    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
-    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
-
-  # there are no client versions of these, use server versions directly
-  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
-  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
-
-  hive_specific_configs_supported = True
-else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = '/usr'
-  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  hive_bin = '/usr/lib/hive/bin'
-  hive_lib = '/usr/lib/hive/lib/'
-  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
-  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
-  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
-
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
-    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-  # for newer versions
-  else:
-    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-    
-  hive_specific_configs_supported = False
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-hive_conf_dir_prefix = "/etc/hive"
-hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
-hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
-hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
-limits_conf_dir = "/etc/security/limits.d"
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-  hcat_conf_dir = '/etc/hcatalog/conf'
-  config_dir = '/etc/hcatalog/conf'
-# for newer versions
-else:
-  hcat_conf_dir = '/etc/hive-hcatalog/conf'
-  config_dir = '/etc/hive-webhcat/conf'
-
-execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
-hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
-hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
-
-webhcat_conf_dir = status_params.webhcat_conf_dir
-hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
-hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
-#HACK Temporarily use dbType=azuredb while invoking schematool
-if hive_metastore_db_type == "mssql":
-  hive_metastore_db_type = "azuredb"
-
-#users
-hive_user = config['configurations']['hive-env']['hive_user']
-#JDBC driver jar name
-hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
-if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
-  jdbc_jar_name = "sqljdbc4.jar"
-  jdbc_symlink_name = "mssql-jdbc-driver.jar"
-elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
-  jdbc_jar_name = "mysql-connector-java.jar"
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-elif hive_jdbc_driver == "org.postgresql.Driver":
-  jdbc_jar_name = "postgresql-jdbc.jar"
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-  jdbc_jar_name = "ojdbc.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-
-check_db_connection_jar_name = "DBConnectionVerification.jar"
-check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver","com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
-downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
-prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
-templeton_port = config['configurations']['webhcat-site']['templeton.port']
-
-
-#common
-hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
-hive_metastore_host = hive_metastore_hosts[0]
-hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
-hive_var_lib = '/var/lib/hive'
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
-hive_server_hosts = config['clusterHostInfo']['hive_server_host']
-hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
-if hive_transport_mode.lower() == "http":
-  hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
-else:
-  hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
-hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
-hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
-smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-
-hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
-
-#hive_env
-hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
-hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
-hive_pid_dir = status_params.hive_pid_dir
-hive_pid = status_params.hive_pid
-#Default conf dir for client
-hive_conf_dirs_list = [hive_client_conf_dir]
-
-if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
-  hive_conf_dirs_list.append(hive_server_conf_dir)
-
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  hive_config_dir = hive_server_conf_dir
-else:
-  hive_config_dir = hive_client_conf_dir
-
-#hive-site
-hive_database_name = config['configurations']['hive-env']['hive_database_name']
-hive_database = config['configurations']['hive-env']['hive_database']
-
-#Starting hiveserver2
-start_hiveserver2_script = 'startHiveserver2.sh.j2'
-
-##Starting metastore
-start_metastore_script = 'startMetastore.sh'
-hive_metastore_pid = status_params.hive_metastore_pid
-java_share_dir = '/usr/share/java'
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
-
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
-
-target = format("{hive_lib}/{jdbc_jar_name}")
-
-jdk_location = config['hostLevelParams']['jdk_location']
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-
-start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
-start_metastore_path = format("{tmp_dir}/start_metastore_script")
-
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
-java64_home = config['hostLevelParams']['java_home']
-
-##### MYSQL
-
-db_name = config['configurations']['hive-env']['hive_database_name']
-mysql_group = 'mysql'
-mysql_host = config['clusterHostInfo']['hive_mysql_host']
-
-mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
-mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
-
-######## Metastore Schema
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-  init_metastore_schema = False
-else:
-  init_metastore_schema = True
-
-########## HCAT
-
-hcat_dbroot = hcat_lib
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-
-hcat_pid_dir = status_params.hcat_pid_dir
-hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-hcat_env_sh_template = config['configurations']['hcat-env']['content']
-
-#hive-log4j.properties.template
-if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
-  log4j_props = config['configurations']['hive-log4j']['content']
-else:
-  log4j_props = None
-
-#webhcat-log4j.properties.template
-if (('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j'])):
-  log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
-else:
-  log4j_webhcat_props = None
-
-#hive-exec-log4j.properties.template
-if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
-  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
+if OSCheck.is_windows_family():
+  from params_windows import *
 else:
-  log4j_exec_props = None
-
-daemon_name = status_params.daemon_name
-process_name = status_params.process_name
-hive_env_sh_template = config['configurations']['hive-env']['content']
-
-hive_hdfs_user_dir = format("/user/{hive_user}")
-hive_hdfs_user_mode = 0700
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
-#for create_hdfs_directory
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)
-
-# Tez-related properties
-tez_user = config['configurations']['tez-env']['tez_user']
-
-# Tez jars
-tez_local_api_jars = '/usr/lib/tez/tez*.jar'
-tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-app_dir_files = {tez_local_api_jars:None}
-
-# Tez libraries
-tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
-
-if OSCheck.is_ubuntu_family():
-  mysql_configname = '/etc/mysql/my.cnf'
-else:
-  mysql_configname = '/etc/my.cnf'
-  
-mysql_user = 'mysql'
-
-# Hive security
-hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
-
-mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
-hive_use_existing_db = hive_database.startswith('Existing')
-hive_exclude_packages = []
-
-# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
-# trying to install mysql-connector-java upon them can cause packages to conflict.
-if hive_use_existing_db:
-  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
-else:
-  if 'role' in config and config['role'] != "MYSQL_SERVER":
-    hive_exclude_packages = ['mysql', 'mysql-server']
-  if os.path.exists(mysql_jdbc_driver_jar):
-    hive_exclude_packages.append('mysql-connector-java')
-
-########################################################
-########### WebHCat related params #####################
-########################################################
-
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.hcat_pid_dir
-
-webhcat_pid_file = status_params.webhcat_pid_file
-
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-#for create_hdfs_directory
-security_param = "true" if security_enabled else "false"
-
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir = hadoop_conf_dir,
-  hdfs_user = hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
-
-# ranger host
-ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
-has_ranger_admin = not len(ranger_admin_hosts) == 0
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
-  # setting flag value for ranger hive plugin
-  enable_ranger_hive = False
-  ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
-  if ranger_plugin_enable.lower() == 'yes':
-    enable_ranger_hive = True
-  elif ranger_plugin_enable.lower() == 'no':
-    enable_ranger_hive = False
-
-#ranger hive properties
-policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
-sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
-xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
-xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
-xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
-xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
-xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
-repo_name = str(config['clusterName']) + '_hive'
-db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
-hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
-hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
-hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
-hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
-hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
-hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
-hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
-hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
-hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
-hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
-hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
-hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
-ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
-ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
-ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
-ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
-grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true")
-
-jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName","")
-common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")
-
-repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
-repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")
-
-admin_uname = default("/configurations/ranger-env/admin_username", "admin")
-admin_password = default("/configurations/ranger-env/admin_password", "admin")
-admin_uname_password = format("{admin_uname}:{admin_password}")
-
-ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
-ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
-policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")
-
-#For curl command in ranger plugin to get db connector
-if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
-  ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
-  ranger_jdbc_jar_name = "mysql-connector-java.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
-  ranger_jdbc_jar_name = "ojdbc6.jar"
-  ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"
-
-ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
-
-ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
-ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
-
-if security_enabled:
-  hive_principal = hive_server_principal.replace('_HOST',hostname.lower())
+  from params_linux import *

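After this change params.py is nothing but an OS switch that star-imports the platform-specific namespace, so every consumer keeps its existing "import params" line and never sees the split. The same facade in miniature, mimicking OSCheck with platform.system() and using made-up parameter values:

    import platform
    import sys
    import types

    def _build_params():
        # Plays the role of 'from params_windows import *' vs 'from params_linux import *'.
        mod = types.ModuleType("params")
        if platform.system() == "Windows":   # stand-in for OSCheck.is_windows_family()
            mod.hive_user, mod.hive_conf_dir = "hadoop", r"C:\hdp\hive\conf"
        else:
            mod.hive_user, mod.hive_conf_dir = "hive", "/etc/hive/conf"
        return mod

    sys.modules["params"] = _build_params()
    import params                  # unchanged in every consumer script
    print(params.hive_user, params.hive_conf_dir)
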
+ 414 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -0,0 +1,414 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+from ambari_commons.os_check import OSCheck
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
+from resource_management import *
+import status_params
+import os
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+sudo = AMBARI_SUDO_BINARY
+
+stack_name = default("/hostLevelParams/stack_name", None)
+
+# node hostname
+hostname = config["hostname"]
+
+# This is expected to be of the form #.#.#.#
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+# Hadoop params
+# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
+  # start out with client libraries
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_home = '/usr/hdp/current/hadoop-client'
+  hive_bin = '/usr/hdp/current/hive-client/bin'
+  hive_lib = '/usr/hdp/current/hive-client/lib'
+
+  # if this is a server action, then use the server binaries; smoke tests
+  # use the client binaries
+  command_role = default("/role", "")
+  server_role_dir_mapping = { 'HIVE_SERVER' : 'hive-server2',
+    'HIVE_METASTORE' : 'hive-metastore' }
+
+  if command_role in server_role_dir_mapping:
+    hive_server_root = server_role_dir_mapping[command_role]
+    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
+    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
+
+  # there are no client versions of these, use server versions directly
+  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
+  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
+
+  hive_specific_configs_supported = True
+else:
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
+  hive_bin = '/usr/lib/hive/bin'
+  hive_lib = '/usr/lib/hive/lib/'
+  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
+  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
+  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
+
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+  # for newer versions
+  else:
+    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+    
+  hive_specific_configs_supported = False
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hive_conf_dir_prefix = "/etc/hive"
+hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
+hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
+hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
+limits_conf_dir = "/etc/security/limits.d"
+
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+  hcat_conf_dir = '/etc/hcatalog/conf'
+  config_dir = '/etc/hcatalog/conf'
+# for newer versions
+else:
+  hcat_conf_dir = '/etc/hive-hcatalog/conf'
+  config_dir = '/etc/hive-webhcat/conf'
+
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
+hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
+
+webhcat_conf_dir = status_params.webhcat_conf_dir
+hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
+hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
+#HACK Temporarily use dbType=azuredb while invoking schematool
+if hive_metastore_db_type == "mssql":
+  hive_metastore_db_type = "azuredb"
+
+#users
+hive_user = config['configurations']['hive-env']['hive_user']
+#JDBC driver jar name
+hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
+if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
+  jdbc_jar_name = "sqljdbc4.jar"
+  jdbc_symlink_name = "mssql-jdbc-driver.jar"
+elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
+  jdbc_jar_name = "mysql-connector-java.jar"
+  jdbc_symlink_name = "mysql-jdbc-driver.jar"
+elif hive_jdbc_driver == "org.postgresql.Driver":
+  jdbc_jar_name = "postgresql-jdbc.jar"
+  jdbc_symlink_name = "postgres-jdbc-driver.jar"
+elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+  jdbc_jar_name = "ojdbc.jar"
+  jdbc_symlink_name = "oracle-jdbc-driver.jar"
+
+check_db_connection_jar_name = "DBConnectionVerification.jar"
+check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
+hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver","com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
+downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
+prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
+templeton_port = config['configurations']['webhcat-site']['templeton.port']
+
+
+#common
+hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
+hive_metastore_host = hive_metastore_hosts[0]
+hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
+hive_var_lib = '/var/lib/hive'
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
+hive_server_hosts = config['clusterHostInfo']['hive_server_host']
+hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
+if hive_transport_mode.lower() == "http":
+  hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
+else:
+  hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
+hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
+hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
+hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
+
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
+smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+
+fs_root = config['configurations']['core-site']['fs.defaultFS']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+
+hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
+
+#hive_env
+hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
+hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
+hive_pid_dir = status_params.hive_pid_dir
+hive_pid = status_params.hive_pid
+#Default conf dir for client
+hive_conf_dirs_list = [hive_client_conf_dir]
+
+if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
+  hive_conf_dirs_list.append(hive_server_conf_dir)
+
+if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
+  hive_config_dir = hive_server_conf_dir
+else:
+  hive_config_dir = hive_client_conf_dir
+
+#hive-site
+hive_database_name = config['configurations']['hive-env']['hive_database_name']
+hive_database = config['configurations']['hive-env']['hive_database']
+
+#Starting hiveserver2
+start_hiveserver2_script = 'startHiveserver2.sh.j2'
+
+##Starting metastore
+start_metastore_script = 'startMetastore.sh'
+hive_metastore_pid = status_params.hive_metastore_pid
+java_share_dir = '/usr/share/java'
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+
+target = format("{hive_lib}/{jdbc_jar_name}")
+
+jdk_location = config['hostLevelParams']['jdk_location']
+driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
+
+start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
+start_metastore_path = format("{tmp_dir}/start_metastore_script")
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
+java64_home = config['hostLevelParams']['java_home']
+
+##### MYSQL
+
+db_name = config['configurations']['hive-env']['hive_database_name']
+mysql_group = 'mysql'
+mysql_host = config['clusterHostInfo']['hive_mysql_host']
+
+mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
+mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
+
+######## Metastore Schema
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+  init_metastore_schema = False
+else:
+  init_metastore_schema = True
+
+########## HCAT
+
+hcat_dbroot = hcat_lib
+
+hcat_user = config['configurations']['hive-env']['hcat_user']
+webhcat_user = config['configurations']['hive-env']['webhcat_user']
+
+hcat_pid_dir = status_params.hcat_pid_dir
+hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+hcat_env_sh_template = config['configurations']['hcat-env']['content']
+
+#hive-log4j.properties.template
+if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
+  log4j_props = config['configurations']['hive-log4j']['content']
+else:
+  log4j_props = None
+
+#webhcat-log4j.properties.template
+if (('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j'])):
+  log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
+else:
+  log4j_webhcat_props = None
+
+#hive-exec-log4j.properties.template
+if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
+  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
+else:
+  log4j_exec_props = None
+
+daemon_name = status_params.daemon_name
+process_name = status_params.process_name
+hive_env_sh_template = config['configurations']['hive-env']['content']
+
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+#for create_hdfs_directory
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)
+
+# Tez-related properties
+tez_user = config['configurations']['tez-env']['tez_user']
+
+# Tez jars
+tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
+
+# Tez libraries
+tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
+
+if OSCheck.is_ubuntu_family():
+  mysql_configname = '/etc/mysql/my.cnf'
+else:
+  mysql_configname = '/etc/my.cnf'
+  
+mysql_user = 'mysql'
+
+# Hive security
+hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
+
+mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
+hive_use_existing_db = hive_database.startswith('Existing')
+hive_exclude_packages = []
+
+# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
+# trying to install mysql-connector-java upon them can cause packages to conflict.
+if hive_use_existing_db:
+  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
+else:
+  if 'role' in config and config['role'] != "MYSQL_SERVER":
+    hive_exclude_packages = ['mysql', 'mysql-server']
+  if os.path.exists(mysql_jdbc_driver_jar):
+    hive_exclude_packages.append('mysql-connector-java')
+
+########################################################
+########### WebHCat related params #####################
+########################################################
+
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.hcat_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+security_param = "true" if security_enabled else "false"
+
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir = hadoop_conf_dir,
+  hdfs_user = hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
+)
+
+# ranger host
+ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
+has_ranger_admin = not len(ranger_admin_hosts) == 0
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
+  # setting flag value for ranger hive plugin
+  enable_ranger_hive = False
+  ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
+  if ranger_plugin_enable.lower() == 'yes':
+    enable_ranger_hive = True
+  elif ranger_plugin_enable.lower() == 'no':
+    enable_ranger_hive = False
+
+#ranger hive properties
+policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
+sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
+xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
+xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
+xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
+xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
+xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
+repo_name = str(config['clusterName']) + '_hive'
+db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
+hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
+hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
+hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
+hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
+hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
+hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
+hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
+hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
+hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
+hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
+hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
+hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
+ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
+ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
+ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
+ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
+grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true")
+
+jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName","")
+common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")
+
+repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
+repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")
+
+admin_uname = default("/configurations/ranger-env/admin_username", "admin")
+admin_password = default("/configurations/ranger-env/admin_password", "admin")
+admin_uname_password = format("{admin_uname}:{admin_password}")
+
+ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
+ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
+policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")
+
+# For the curl command in the Ranger plugin that downloads the DB connector
+if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
+  ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
+  ranger_jdbc_jar_name = "mysql-connector-java.jar"
+elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
+  ranger_jdbc_jar_name = "ojdbc6.jar"
+  ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"
+
+ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
+
+ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
+ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
+
+if security_enabled:
+  hive_principal = hive_server_principal.replace('_HOST',hostname.lower())
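
Note on the flavor mapping above: the if/elif chain binds ranger_jdbc_jar_name and ranger_jdbc_symlink_name only for the mysql and oracle flavors, so any other xa_audit_db_flavor value leaves both names undefined for the format() calls that follow. A defensive sketch, assuming a dict-based mapping and an explicit fallback (neither is part of this commit):

  # Hypothetical hardening of the flavor -> connector mapping.
  RANGER_JDBC_DRIVERS = {
    'mysql':  ('mysql-connector-java.jar', 'mysql-jdbc-driver.jar'),
    'oracle': ('ojdbc6.jar', 'oracle-jdbc-driver.jar'),
  }
  _flavor = (xa_audit_db_flavor or '').lower()
  ranger_jdbc_jar_name, ranger_jdbc_symlink_name = \
    RANGER_JDBC_DRIVERS.get(_flavor, (None, None))
  if ranger_jdbc_jar_name is None:
    Logger.warning("No Ranger JDBC connector known for DB flavor %r" % _flavor)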

+ 4 - 5
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/params.py → ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py

@@ -19,6 +19,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from status_params import *
 
 # server configurations
 config = Script.get_config()
@@ -47,9 +48,7 @@ hive_metastore_db_type = config['configurations']['hive-env']['hive_database_typ
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 
-######## Metastore Schema
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-  init_metastore_schema = False
-else:
-  init_metastore_schema = True
+hive_exclude_packages = []
 
+######## Metastore Schema
+init_metastore_schema = config['configurations']['hive-site']['datanucleus.autoCreateSchema']
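
One behavioural difference hidden in this hunk: the removed branch assigned a real boolean, while the new line stores the raw hive-site property, normally the string "true" or "false", and both strings are truthy in Python. If the flag is ever tested directly, an explicit coercion is safer; a minimal sketch, not part of the commit:

  # Sketch: coerce the hive-site string property to a real boolean.
  init_metastore_schema = str(
    config['configurations']['hive-site']['datanucleus.autoCreateSchema']
  ).lower() == 'true'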

+ 21 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py

@@ -21,11 +21,31 @@ limitations under the License.
 from resource_management import *
 import socket
 import sys
-
 from hcat_service_check import hcat_service_check
 from webhcat_service_check import webhcat_service_check
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
+
 
 class HiveServiceCheck(Script):
+  pass
+
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveServiceCheckWindows(HiveServiceCheck):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    service = "HIVE"
+    Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hive_user, logoutput=True)
+
+    hcat_service_check()
+    webhcat_service_check()
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveServiceCheckDefault(HiveServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
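
The shape above is the pattern this commit applies across all of the HIVE scripts: the base class is a stub, @OsFamilyImpl registers one subclass per OS family, and the host-appropriate subclass is the one that actually runs. A simplified, self-contained analogue of that dispatch (an illustration of the idea, not the real ambari_commons mechanism):

  import platform

  _CURRENT_FAMILY = 'winsrv' if platform.system() == 'Windows' else 'default'

  def os_family_impl(os_family):
    # Simplified stand-in for OsFamilyImpl: remember on the base class
    # the subclass registered for the host's OS family.
    def decorator(cls):
      if os_family == _CURRENT_FAMILY:
        cls.__bases__[0].impl = cls
      return cls
    return decorator

  class ServiceCheck(object):
    impl = None  # set by whichever registration below matches the host

  @os_family_impl('winsrv')
  class ServiceCheckWindows(ServiceCheck):
    def check(self):
      return 'cmd /C Run-SmokeTests.cmd HIVE'

  @os_family_impl('default')
  class ServiceCheckDefault(ServiceCheck):
    def check(self):
      return 'templetonSmoke.sh'

  print(ServiceCheck.impl().check())  # picks the host-appropriate variant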

+ 30 - 25
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py

@@ -23,29 +23,34 @@ from ambari_commons.os_check import OSCheck
 
 config = Script.get_config()
 
-hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
-hive_pid = 'hive-server.pid'
-
-hive_metastore_pid = 'hive.pid'
-
-hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
-webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
-
-process_name = 'mysqld'
-if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
-  daemon_name = 'mysql'
+if OSCheck.is_windows_family():
+  hive_metastore_win_service_name = "metastore"
+  hive_client_win_service_name = "hwi"
+  hive_server_win_service_name = "hiveserver2"
+  webhcat_server_win_service_name = "templeton"
 else:
-  daemon_name = 'mysqld'
-
-
-# Security related/required params
-hostname = config['hostname']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-hadoop_conf_dir = "/etc/hadoop/conf"
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-tmp_dir = Script.get_tmp_dir()
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hive_user = config['configurations']['hive-env']['hive_user']
-hive_conf_dir = "/etc/hive/conf"
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-webhcat_conf_dir = '/etc/hive-webhcat/conf'
+  hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
+  hive_pid = 'hive-server.pid'
+
+  hive_metastore_pid = 'hive.pid'
+
+  hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
+  webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
+
+  process_name = 'mysqld'
+  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
+    daemon_name = 'mysql'
+  else:
+    daemon_name = 'mysqld'
+
+  # Security related/required params
+  hostname = config['hostname']
+  security_enabled = config['configurations']['cluster-env']['security_enabled']
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  tmp_dir = Script.get_tmp_dir()
+  hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+  hive_user = config['configurations']['hive-env']['hive_user']
+  hive_conf_dir = "/etc/hive/conf"
+  webhcat_user = config['configurations']['hive-env']['webhcat_user']
+  webhcat_conf_dir = '/etc/hive-webhcat/conf'

+ 11 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py

@@ -21,13 +21,23 @@ Ambari Agent
 import sys
 import os.path
 import glob
-
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def webhcat():
+  import params
+  XmlConfig("webhcat-site.xml",
+            conf_dir=params.hcat_config_dir,
+            configurations=params.config['configurations']['webhcat-site']
+  )
 
 
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def webhcat():
   import params
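
OsFamilyFuncImpl applies the same idea at function granularity: two bodies share the name webhcat, and the decorator decides which one the name ends up bound to. A rough stand-in for that resolution (a simplified assumption about the mechanism, not the ambari_commons implementation):

  import platform

  _CURRENT_FAMILY = 'winsrv' if platform.system() == 'Windows' else 'default'
  _FUNC_REGISTRY = {}

  def os_family_func_impl(os_family):
    def decorator(func):
      name = func.__name__
      if os_family == _CURRENT_FAMILY:
        _FUNC_REGISTRY[name] = func        # exact OS match wins
      elif os_family == 'default' and name not in _FUNC_REGISTRY:
        _FUNC_REGISTRY[name] = func        # fallback implementation
      return _FUNC_REGISTRY.get(name, func)
    return decorator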
 

+ 22 - 15
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -24,42 +24,49 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from webhcat import webhcat
 from webhcat_service import webhcat_service
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class WebHCatServer(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hive-webhcat"}
 
+class WebHCatServer(Script):
   def install(self, env):
     import params
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
 
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    webhcat_service(action = 'start')
-
+    webhcat_service(action='start')
 
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
+    webhcat_service(action='stop')
 
-    webhcat_service(action = 'stop')
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    webhcat()
 
 
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class WebHCatServerWindows(WebHCatServer):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    check_process_status(status_params.webhcat_pid_file)
+    check_windows_service_status(status_params.webhcat_server_win_service_name)
+
 
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class WebHCatServerDefault(WebHCatServer):
+  def get_stack_to_component(self):
+    return {"HDP": "hive-webhcat"}
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    check_process_status(status_params.webhcat_pid_file)
 
   def pre_rolling_restart(self, env):
     Logger.info("Executing WebHCat Rolling Upgrade pre-restart")

+ 10 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py

@@ -19,7 +19,17 @@ Ambari Agent
 
 """
 from resource_management import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def webhcat_service(action='start'):
+  import params
+  if action == 'start' or action == 'stop':
+    Service(params.webhcat_server_win_service_name, action=action)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def webhcat_service(action='start'):
   import params
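
Worth noting: the Windows variant above silently ignores any action other than 'start' and 'stop'. Failing fast instead is a small change; an illustrative sketch, not what the commit does (Fail is the standard resource_management exception):

  # Sketch: reject unsupported actions instead of silently no-opping.
  def webhcat_service(action='start'):
    import params
    if action not in ('start', 'stop'):
      raise Fail("Unsupported webhcat_service action: {0}".format(action))
    Service(params.webhcat_server_win_service_name, action=action)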
 

+ 11 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py

@@ -19,7 +19,18 @@ limitations under the License.
 """
 
 from resource_management import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def webhcat_service_check():
+  import params
+  smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+  service = "WEBHCAT"
+  Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def webhcat_service_check():
   import params
   File(format("{tmp_dir}/templetonSmoke.sh"),

+ 0 - 777
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.mysql.sql

@@ -1,777 +0,0 @@
--- MySQL dump 10.13  Distrib 5.5.25, for osx10.6 (i386)
---
--- Host: localhost    Database: test
--- ------------------------------------------------------
--- Server version	5.5.25
-
-/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
-/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
-/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
-/*!40101 SET NAMES utf8 */;
-/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
-/*!40103 SET TIME_ZONE='+00:00' */;
-/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
-/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
-/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-
---
--- Table structure for table `BUCKETING_COLS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `BUCKETING_COLS` (
-  `SD_ID` bigint(20) NOT NULL,
-  `BUCKET_COL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
-  KEY `BUCKETING_COLS_N49` (`SD_ID`),
-  CONSTRAINT `BUCKETING_COLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `CDS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `CDS` (
-  `CD_ID` bigint(20) NOT NULL,
-  PRIMARY KEY (`CD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `COLUMNS_V2`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `COLUMNS_V2` (
-  `CD_ID` bigint(20) NOT NULL,
-  `COMMENT` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `TYPE_NAME` varchar(4000) DEFAULT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`CD_ID`,`COLUMN_NAME`),
-  KEY `COLUMNS_V2_N49` (`CD_ID`),
-  CONSTRAINT `COLUMNS_V2_FK1` FOREIGN KEY (`CD_ID`) REFERENCES `CDS` (`CD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `DATABASE_PARAMS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` (
-  `DB_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(180) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`DB_ID`,`PARAM_KEY`),
-  KEY `DATABASE_PARAMS_N49` (`DB_ID`),
-  CONSTRAINT `DATABASE_PARAMS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `DBS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `DBS` (
-  `DB_ID` bigint(20) NOT NULL,
-  `DESC` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `DB_LOCATION_URI` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`DB_ID`),
-  UNIQUE KEY `UNIQUE_DATABASE` (`NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `DB_PRIVS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
-  `DB_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `DB_ID` bigint(20) DEFAULT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `DB_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`DB_GRANT_ID`),
-  UNIQUE KEY `DBPRIVILEGEINDEX` (`DB_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`DB_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  KEY `DB_PRIVS_N49` (`DB_ID`),
-  CONSTRAINT `DB_PRIVS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `GLOBAL_PRIVS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
-  `USER_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `USER_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`USER_GRANT_ID`),
-  UNIQUE KEY `GLOBALPRIVILEGEINDEX` (`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`USER_PRIV`,`GRANTOR`,`GRANTOR_TYPE`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `IDXS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `IDXS` (
-  `INDEX_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `DEFERRED_REBUILD` bit(1) NOT NULL,
-  `INDEX_HANDLER_CLASS` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INDEX_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INDEX_TBL_ID` bigint(20) DEFAULT NULL,
-  `LAST_ACCESS_TIME` int(11) NOT NULL,
-  `ORIG_TBL_ID` bigint(20) DEFAULT NULL,
-  `SD_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`INDEX_ID`),
-  UNIQUE KEY `UNIQUEINDEX` (`INDEX_NAME`,`ORIG_TBL_ID`),
-  KEY `IDXS_N51` (`SD_ID`),
-  KEY `IDXS_N50` (`INDEX_TBL_ID`),
-  KEY `IDXS_N49` (`ORIG_TBL_ID`),
-  CONSTRAINT `IDXS_FK1` FOREIGN KEY (`ORIG_TBL_ID`) REFERENCES `TBLS` (`TBL_ID`),
-  CONSTRAINT `IDXS_FK2` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`),
-  CONSTRAINT `IDXS_FK3` FOREIGN KEY (`INDEX_TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `INDEX_PARAMS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `INDEX_PARAMS` (
-  `INDEX_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`INDEX_ID`,`PARAM_KEY`),
-  KEY `INDEX_PARAMS_N49` (`INDEX_ID`),
-  CONSTRAINT `INDEX_PARAMS_FK1` FOREIGN KEY (`INDEX_ID`) REFERENCES `IDXS` (`INDEX_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `NUCLEUS_TABLES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `NUCLEUS_TABLES` (
-  `CLASS_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `TABLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `TYPE` varchar(4) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `OWNER` varchar(2) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `VERSION` varchar(20) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `INTERFACE_NAME` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`CLASS_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PARTITIONS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PARTITIONS` (
-  `PART_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `LAST_ACCESS_TIME` int(11) NOT NULL,
-  `PART_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `SD_ID` bigint(20) DEFAULT NULL,
-  `TBL_ID` bigint(20) DEFAULT NULL,
-  `LINK_TARGET_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`PART_ID`),
-  UNIQUE KEY `UNIQUEPARTITION` (`PART_NAME`,`TBL_ID`),
-  KEY `PARTITIONS_N49` (`TBL_ID`),
-  KEY `PARTITIONS_N50` (`SD_ID`),
-  KEY `PARTITIONS_N51` (`LINK_TARGET_ID`),
-  CONSTRAINT `PARTITIONS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`),
-  CONSTRAINT `PARTITIONS_FK2` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`),
-  CONSTRAINT `PARTITIONS_FK3` FOREIGN KEY (`LINK_TARGET_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PARTITION_EVENTS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PARTITION_EVENTS` (
-  `PART_NAME_ID` bigint(20) NOT NULL,
-  `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `EVENT_TIME` bigint(20) NOT NULL,
-  `EVENT_TYPE` int(11) NOT NULL,
-  `PARTITION_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`PART_NAME_ID`),
-  KEY `PARTITIONEVENTINDEX` (`PARTITION_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PARTITION_KEYS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PARTITION_KEYS` (
-  `TBL_ID` bigint(20) NOT NULL,
-  `PKEY_COMMENT` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PKEY_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PKEY_TYPE` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`TBL_ID`,`PKEY_NAME`),
-  KEY `PARTITION_KEYS_N49` (`TBL_ID`),
-  CONSTRAINT `PARTITION_KEYS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PARTITION_KEY_VALS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PARTITION_KEY_VALS` (
-  `PART_ID` bigint(20) NOT NULL,
-  `PART_KEY_VAL` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`PART_ID`,`INTEGER_IDX`),
-  KEY `PARTITION_KEY_VALS_N49` (`PART_ID`),
-  CONSTRAINT `PARTITION_KEY_VALS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PARTITION_PARAMS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PARTITION_PARAMS` (
-  `PART_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`PART_ID`,`PARAM_KEY`),
-  KEY `PARTITION_PARAMS_N49` (`PART_ID`),
-  CONSTRAINT `PARTITION_PARAMS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PART_COL_PRIVS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PART_COL_PRIVS` (
-  `PART_COLUMN_GRANT_ID` bigint(20) NOT NULL,
-  `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_ID` bigint(20) DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_COL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`PART_COLUMN_GRANT_ID`),
-  KEY `PART_COL_PRIVS_N49` (`PART_ID`),
-  KEY `PARTITIONCOLUMNPRIVILEGEINDEX` (`PART_ID`,`COLUMN_NAME`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`PART_COL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `PART_COL_PRIVS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `PART_PRIVS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `PART_PRIVS` (
-  `PART_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_ID` bigint(20) DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`PART_GRANT_ID`),
-  KEY `PARTPRIVILEGEINDEX` (`PART_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`PART_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  KEY `PART_PRIVS_N49` (`PART_ID`),
-  CONSTRAINT `PART_PRIVS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `ROLES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `ROLES` (
-  `ROLE_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `OWNER_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `ROLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`ROLE_ID`),
-  UNIQUE KEY `ROLEENTITYINDEX` (`ROLE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `ROLE_MAP`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `ROLE_MAP` (
-  `ROLE_GRANT_ID` bigint(20) NOT NULL,
-  `ADD_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `ROLE_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`ROLE_GRANT_ID`),
-  UNIQUE KEY `USERROLEMAPINDEX` (`PRINCIPAL_NAME`,`ROLE_ID`,`GRANTOR`,`GRANTOR_TYPE`),
-  KEY `ROLE_MAP_N49` (`ROLE_ID`),
-  CONSTRAINT `ROLE_MAP_FK1` FOREIGN KEY (`ROLE_ID`) REFERENCES `ROLES` (`ROLE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SDS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SDS` (
-  `SD_ID` bigint(20) NOT NULL,
-  `CD_ID` bigint(20) DEFAULT NULL,
-  `INPUT_FORMAT` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `IS_COMPRESSED` bit(1) NOT NULL,
-  `IS_STOREDASSUBDIRECTORIES` bit(1) NOT NULL,
-  `LOCATION` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `NUM_BUCKETS` int(11) NOT NULL,
-  `OUTPUT_FORMAT` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `SERDE_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`SD_ID`),
-  KEY `SDS_N49` (`SERDE_ID`),
-  KEY `SDS_N50` (`CD_ID`),
-  CONSTRAINT `SDS_FK1` FOREIGN KEY (`SERDE_ID`) REFERENCES `SERDES` (`SERDE_ID`),
-  CONSTRAINT `SDS_FK2` FOREIGN KEY (`CD_ID`) REFERENCES `CDS` (`CD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SD_PARAMS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SD_PARAMS` (
-  `SD_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`SD_ID`,`PARAM_KEY`),
-  KEY `SD_PARAMS_N49` (`SD_ID`),
-  CONSTRAINT `SD_PARAMS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SEQUENCE_TABLE`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SEQUENCE_TABLE` (
-  `SEQUENCE_NAME` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `NEXT_VAL` bigint(20) NOT NULL,
-  PRIMARY KEY (`SEQUENCE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SERDES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SERDES` (
-  `SERDE_ID` bigint(20) NOT NULL,
-  `NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `SLIB` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`SERDE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SERDE_PARAMS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SERDE_PARAMS` (
-  `SERDE_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`),
-  KEY `SERDE_PARAMS_N49` (`SERDE_ID`),
-  CONSTRAINT `SERDE_PARAMS_FK1` FOREIGN KEY (`SERDE_ID`) REFERENCES `SERDES` (`SERDE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SKEWED_COL_NAMES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SKEWED_COL_NAMES` (
-  `SD_ID` bigint(20) NOT NULL,
-  `SKEWED_COL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
-  KEY `SKEWED_COL_NAMES_N49` (`SD_ID`),
-  CONSTRAINT `SKEWED_COL_NAMES_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SKEWED_COL_VALUE_LOC_MAP`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SKEWED_COL_VALUE_LOC_MAP` (
-  `SD_ID` bigint(20) NOT NULL,
-  `STRING_LIST_ID_KID` bigint(20) NOT NULL,
-  `LOCATION` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`),
-  KEY `SKEWED_COL_VALUE_LOC_MAP_N49` (`STRING_LIST_ID_KID`),
-  KEY `SKEWED_COL_VALUE_LOC_MAP_N50` (`SD_ID`),
-  CONSTRAINT `SKEWED_COL_VALUE_LOC_MAP_FK2` FOREIGN KEY (`STRING_LIST_ID_KID`) REFERENCES `SKEWED_STRING_LIST` (`STRING_LIST_ID`),
-  CONSTRAINT `SKEWED_COL_VALUE_LOC_MAP_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SKEWED_STRING_LIST`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST` (
-  `STRING_LIST_ID` bigint(20) NOT NULL,
-  PRIMARY KEY (`STRING_LIST_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SKEWED_STRING_LIST_VALUES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST_VALUES` (
-  `STRING_LIST_ID` bigint(20) NOT NULL,
-  `STRING_LIST_VALUE` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`),
-  KEY `SKEWED_STRING_LIST_VALUES_N49` (`STRING_LIST_ID`),
-  CONSTRAINT `SKEWED_STRING_LIST_VALUES_FK1` FOREIGN KEY (`STRING_LIST_ID`) REFERENCES `SKEWED_STRING_LIST` (`STRING_LIST_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SKEWED_VALUES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SKEWED_VALUES` (
-  `SD_ID_OID` bigint(20) NOT NULL,
-  `STRING_LIST_ID_EID` bigint(20) NOT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`),
-  KEY `SKEWED_VALUES_N50` (`SD_ID_OID`),
-  KEY `SKEWED_VALUES_N49` (`STRING_LIST_ID_EID`),
-  CONSTRAINT `SKEWED_VALUES_FK2` FOREIGN KEY (`STRING_LIST_ID_EID`) REFERENCES `SKEWED_STRING_LIST` (`STRING_LIST_ID`),
-  CONSTRAINT `SKEWED_VALUES_FK1` FOREIGN KEY (`SD_ID_OID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `SORT_COLS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `SORT_COLS` (
-  `SD_ID` bigint(20) NOT NULL,
-  `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `ORDER` int(11) NOT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
-  KEY `SORT_COLS_N49` (`SD_ID`),
-  CONSTRAINT `SORT_COLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `TABLE_PARAMS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `TABLE_PARAMS` (
-  `TBL_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`TBL_ID`,`PARAM_KEY`),
-  KEY `TABLE_PARAMS_N49` (`TBL_ID`),
-  CONSTRAINT `TABLE_PARAMS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `TBLS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `TBLS` (
-  `TBL_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `DB_ID` bigint(20) DEFAULT NULL,
-  `LAST_ACCESS_TIME` int(11) NOT NULL,
-  `OWNER` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `RETENTION` int(11) NOT NULL,
-  `SD_ID` bigint(20) DEFAULT NULL,
-  `TBL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `VIEW_EXPANDED_TEXT` mediumtext,
-  `VIEW_ORIGINAL_TEXT` mediumtext,
-  `LINK_TARGET_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`TBL_ID`),
-  UNIQUE KEY `UNIQUETABLE` (`TBL_NAME`,`DB_ID`),
-  KEY `TBLS_N50` (`SD_ID`),
-  KEY `TBLS_N49` (`DB_ID`),
-  KEY `TBLS_N51` (`LINK_TARGET_ID`),
-  CONSTRAINT `TBLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`),
-  CONSTRAINT `TBLS_FK2` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`),
-  CONSTRAINT `TBLS_FK3` FOREIGN KEY (`LINK_TARGET_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `TBL_COL_PRIVS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
-  `TBL_COLUMN_GRANT_ID` bigint(20) NOT NULL,
-  `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_COL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`TBL_COLUMN_GRANT_ID`),
-  KEY `TABLECOLUMNPRIVILEGEINDEX` (`TBL_ID`,`COLUMN_NAME`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`TBL_COL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  KEY `TBL_COL_PRIVS_N49` (`TBL_ID`),
-  CONSTRAINT `TBL_COL_PRIVS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `TBL_PRIVS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `TBL_PRIVS` (
-  `TBL_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`TBL_GRANT_ID`),
-  KEY `TBL_PRIVS_N49` (`TBL_ID`),
-  KEY `TABLEPRIVILEGEINDEX` (`TBL_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`TBL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `TBL_PRIVS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `TAB_COL_STATS`
---
-CREATE TABLE IF NOT EXISTS `TAB_COL_STATS` (
- `CS_ID` bigint(20) NOT NULL,
- `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TABLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TBL_ID` bigint(20) NOT NULL,
- `LONG_LOW_VALUE` bigint(20),
- `LONG_HIGH_VALUE` bigint(20),
- `DOUBLE_HIGH_VALUE` double(53,4),
- `DOUBLE_LOW_VALUE` double(53,4),
- `BIG_DECIMAL_LOW_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin,
- `BIG_DECIMAL_HIGH_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin,
- `NUM_NULLS` bigint(20) NOT NULL,
- `NUM_DISTINCTS` bigint(20),
- `AVG_COL_LEN` double(53,4),
- `MAX_COL_LEN` bigint(20),
- `NUM_TRUES` bigint(20),
- `NUM_FALSES` bigint(20),
- `LAST_ANALYZED` bigint(20) NOT NULL,
-  PRIMARY KEY (`CS_ID`),
-  CONSTRAINT `TAB_COL_STATS_FK` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
---
--- Table structure for table `PART_COL_STATS`
---
-CREATE TABLE IF NOT EXISTS `PART_COL_STATS` (
- `CS_ID` bigint(20) NOT NULL,
- `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TABLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARTITION_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PART_ID` bigint(20) NOT NULL,
- `LONG_LOW_VALUE` bigint(20),
- `LONG_HIGH_VALUE` bigint(20),
- `DOUBLE_HIGH_VALUE` double(53,4),
- `DOUBLE_LOW_VALUE` double(53,4),
- `BIG_DECIMAL_LOW_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin,
- `BIG_DECIMAL_HIGH_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin,
- `NUM_NULLS` bigint(20) NOT NULL,
- `NUM_DISTINCTS` bigint(20),
- `AVG_COL_LEN` double(53,4),
- `MAX_COL_LEN` bigint(20),
- `NUM_TRUES` bigint(20),
- `NUM_FALSES` bigint(20),
- `LAST_ANALYZED` bigint(20) NOT NULL,
-  PRIMARY KEY (`CS_ID`),
-  CONSTRAINT `PART_COL_STATS_FK` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
---
--- Table structure for table `TYPES`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `TYPES` (
-  `TYPES_ID` bigint(20) NOT NULL,
-  `TYPE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TYPE1` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TYPE2` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`TYPES_ID`),
-  UNIQUE KEY `UNIQUE_TYPE` (`TYPE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `TYPE_FIELDS`
---
-
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE IF NOT EXISTS `TYPE_FIELDS` (
-  `TYPE_NAME` bigint(20) NOT NULL,
-  `COMMENT` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `FIELD_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `FIELD_TYPE` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `INTEGER_IDX` int(11) NOT NULL,
-  PRIMARY KEY (`TYPE_NAME`,`FIELD_NAME`),
-  KEY `TYPE_FIELDS_N49` (`TYPE_NAME`),
-  CONSTRAINT `TYPE_FIELDS_FK1` FOREIGN KEY (`TYPE_NAME`) REFERENCES `TYPES` (`TYPES_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
--- Table `MASTER_KEYS` for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
-CREATE TABLE IF NOT EXISTS `MASTER_KEYS`
-(
-    `KEY_ID` INTEGER NOT NULL AUTO_INCREMENT,
-    `MASTER_KEY` VARCHAR(767) BINARY NULL,
-    PRIMARY KEY (`KEY_ID`)
-) ENGINE=INNODB DEFAULT CHARSET=latin1;
-
--- Table `DELEGATION_TOKENS` for classes [org.apache.hadoop.hive.metastore.model.MDelegationToken]
-CREATE TABLE IF NOT EXISTS `DELEGATION_TOKENS`
-(
-    `TOKEN_IDENT` VARCHAR(767) BINARY NOT NULL,
-    `TOKEN` VARCHAR(767) BINARY NULL,
-    PRIMARY KEY (`TOKEN_IDENT`)
-) ENGINE=INNODB DEFAULT CHARSET=latin1;
-
---
--- Table structure for VERSION
---
-CREATE TABLE IF NOT EXISTS `VERSION` (
-  `VER_ID` BIGINT NOT NULL,
-  `SCHEMA_VERSION` VARCHAR(127) NOT NULL,
-  `VERSION_COMMENT` VARCHAR(255),
-  PRIMARY KEY (`VER_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '0.12.0', 'Hive release version 0.12.0');
-
-/*!40101 SET character_set_client = @saved_cs_client */;
-/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
-
-/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
-/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
-/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
-/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
-/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
-/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-
--- Dump completed on 2012-08-23  0:56:31

+ 0 - 717
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.oracle.sql

@@ -1,717 +0,0 @@
--- Table SEQUENCE_TABLE is an internal table required by DataNucleus.
--- NOTE: Some versions of SchemaTool do not automatically generate this table.
--- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
-CREATE TABLE SEQUENCE_TABLE
-(
-   SEQUENCE_NAME VARCHAR2(255) NOT NULL,
-   NEXT_VAL NUMBER NOT NULL
-);
-
-ALTER TABLE SEQUENCE_TABLE ADD CONSTRAINT PART_TABLE_PK PRIMARY KEY (SEQUENCE_NAME);
-
--- Table NUCLEUS_TABLES is an internal table required by DataNucleus.
--- This table is required if datanucleus.autoStartMechanism=SchemaTable
--- NOTE: Some versions of SchemaTool do not automatically generate this table.
--- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
-CREATE TABLE NUCLEUS_TABLES
-(
-   CLASS_NAME VARCHAR2(128) NOT NULL,
-   TABLE_NAME VARCHAR2(128) NOT NULL,
-   TYPE VARCHAR2(4) NOT NULL,
-   OWNER VARCHAR2(2) NOT NULL,
-   VERSION VARCHAR2(20) NOT NULL,
-   INTERFACE_NAME VARCHAR2(255) NULL
-);
-
-ALTER TABLE NUCLEUS_TABLES ADD CONSTRAINT NUCLEUS_TABLES_PK PRIMARY KEY (CLASS_NAME);
-
--- Table PART_COL_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege]
-CREATE TABLE PART_COL_PRIVS
-(
-    PART_COLUMN_GRANT_ID NUMBER NOT NULL,
-    "COLUMN_NAME" VARCHAR2(128) NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PART_ID NUMBER NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    PART_COL_PRIV VARCHAR2(128) NULL
-);
-
-ALTER TABLE PART_COL_PRIVS ADD CONSTRAINT PART_COL_PRIVS_PK PRIMARY KEY (PART_COLUMN_GRANT_ID);
-
--- Table CDS.
-CREATE TABLE CDS
-(
-    CD_ID NUMBER NOT NULL
-);
-
-ALTER TABLE CDS ADD CONSTRAINT CDS_PK PRIMARY KEY (CD_ID);
-
--- Table COLUMNS_V2 for join relationship
-CREATE TABLE COLUMNS_V2
-(
-    CD_ID NUMBER NOT NULL,
-    "COMMENT" VARCHAR2(256) NULL,
-    "COLUMN_NAME" VARCHAR2(128) NOT NULL,
-    TYPE_NAME VARCHAR2(4000) NOT NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE COLUMNS_V2 ADD CONSTRAINT COLUMNS_V2_PK PRIMARY KEY (CD_ID,"COLUMN_NAME");
-
--- Table PARTITION_KEY_VALS for join relationship
-CREATE TABLE PARTITION_KEY_VALS
-(
-    PART_ID NUMBER NOT NULL,
-    PART_KEY_VAL VARCHAR2(256) NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE PARTITION_KEY_VALS ADD CONSTRAINT PARTITION_KEY_VALS_PK PRIMARY KEY (PART_ID,INTEGER_IDX);
-
--- Table DBS for classes [org.apache.hadoop.hive.metastore.model.MDatabase]
-CREATE TABLE DBS
-(
-    DB_ID NUMBER NOT NULL,
-    "DESC" VARCHAR2(4000) NULL,
-    DB_LOCATION_URI VARCHAR2(4000) NOT NULL,
-    "NAME" VARCHAR2(128) NULL
-);
-
-ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID);
-
--- Table PARTITION_PARAMS for join relationship
-CREATE TABLE PARTITION_PARAMS
-(
-    PART_ID NUMBER NOT NULL,
-    PARAM_KEY VARCHAR2(256) NOT NULL,
-    PARAM_VALUE VARCHAR2(4000) NULL
-);
-
-ALTER TABLE PARTITION_PARAMS ADD CONSTRAINT PARTITION_PARAMS_PK PRIMARY KEY (PART_ID,PARAM_KEY);
-
--- Table SERDES for classes [org.apache.hadoop.hive.metastore.model.MSerDeInfo]
-CREATE TABLE SERDES
-(
-    SERDE_ID NUMBER NOT NULL,
-    "NAME" VARCHAR2(128) NULL,
-    SLIB VARCHAR2(4000) NULL
-);
-
-ALTER TABLE SERDES ADD CONSTRAINT SERDES_PK PRIMARY KEY (SERDE_ID);
-
--- Table TYPES for classes [org.apache.hadoop.hive.metastore.model.MType]
-CREATE TABLE TYPES
-(
-    TYPES_ID NUMBER NOT NULL,
-    TYPE_NAME VARCHAR2(128) NULL,
-    TYPE1 VARCHAR2(767) NULL,
-    TYPE2 VARCHAR2(767) NULL
-);
-
-ALTER TABLE TYPES ADD CONSTRAINT TYPES_PK PRIMARY KEY (TYPES_ID);
-
--- Table PARTITION_KEYS for join relationship
-CREATE TABLE PARTITION_KEYS
-(
-    TBL_ID NUMBER NOT NULL,
-    PKEY_COMMENT VARCHAR2(4000) NULL,
-    PKEY_NAME VARCHAR2(128) NOT NULL,
-    PKEY_TYPE VARCHAR2(767) NOT NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE PARTITION_KEYS ADD CONSTRAINT PARTITION_KEY_PK PRIMARY KEY (TBL_ID,PKEY_NAME);
-
--- Table ROLES for classes [org.apache.hadoop.hive.metastore.model.MRole]
-CREATE TABLE ROLES
-(
-    ROLE_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    OWNER_NAME VARCHAR2(128) NULL,
-    ROLE_NAME VARCHAR2(128) NULL
-);
-
-ALTER TABLE ROLES ADD CONSTRAINT ROLES_PK PRIMARY KEY (ROLE_ID);
-
--- Table PARTITIONS for classes [org.apache.hadoop.hive.metastore.model.MPartition]
-CREATE TABLE PARTITIONS
-(
-    PART_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    LAST_ACCESS_TIME NUMBER (10) NOT NULL,
-    PART_NAME VARCHAR2(767) NULL,
-    SD_ID NUMBER NULL,
-    TBL_ID NUMBER NULL
-);
-
-ALTER TABLE PARTITIONS ADD CONSTRAINT PARTITIONS_PK PRIMARY KEY (PART_ID);
-
--- Table INDEX_PARAMS for join relationship
-CREATE TABLE INDEX_PARAMS
-(
-    INDEX_ID NUMBER NOT NULL,
-    PARAM_KEY VARCHAR2(256) NOT NULL,
-    PARAM_VALUE VARCHAR2(4000) NULL
-);
-
-ALTER TABLE INDEX_PARAMS ADD CONSTRAINT INDEX_PARAMS_PK PRIMARY KEY (INDEX_ID,PARAM_KEY);
-
--- Table TBL_COL_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege]
-CREATE TABLE TBL_COL_PRIVS
-(
-    TBL_COLUMN_GRANT_ID NUMBER NOT NULL,
-    "COLUMN_NAME" VARCHAR2(128) NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    TBL_COL_PRIV VARCHAR2(128) NULL,
-    TBL_ID NUMBER NULL
-);
-
-ALTER TABLE TBL_COL_PRIVS ADD CONSTRAINT TBL_COL_PRIVS_PK PRIMARY KEY (TBL_COLUMN_GRANT_ID);
-
--- Table IDXS for classes [org.apache.hadoop.hive.metastore.model.MIndex]
-CREATE TABLE IDXS
-(
-    INDEX_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    DEFERRED_REBUILD NUMBER(1) NOT NULL CHECK (DEFERRED_REBUILD IN (1,0)),
-    INDEX_HANDLER_CLASS VARCHAR2(4000) NULL,
-    INDEX_NAME VARCHAR2(128) NULL,
-    INDEX_TBL_ID NUMBER NULL,
-    LAST_ACCESS_TIME NUMBER (10) NOT NULL,
-    ORIG_TBL_ID NUMBER NULL,
-    SD_ID NUMBER NULL
-);
-
-ALTER TABLE IDXS ADD CONSTRAINT IDXS_PK PRIMARY KEY (INDEX_ID);
-
--- Table BUCKETING_COLS for join relationship
-CREATE TABLE BUCKETING_COLS
-(
-    SD_ID NUMBER NOT NULL,
-    BUCKET_COL_NAME VARCHAR2(256) NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE BUCKETING_COLS ADD CONSTRAINT BUCKETING_COLS_PK PRIMARY KEY (SD_ID,INTEGER_IDX);
-
--- Table TYPE_FIELDS for join relationship
-CREATE TABLE TYPE_FIELDS
-(
-    TYPE_NAME NUMBER NOT NULL,
-    "COMMENT" VARCHAR2(256) NULL,
-    FIELD_NAME VARCHAR2(128) NOT NULL,
-    FIELD_TYPE VARCHAR2(767) NOT NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE TYPE_FIELDS ADD CONSTRAINT TYPE_FIELDS_PK PRIMARY KEY (TYPE_NAME,FIELD_NAME);
-
--- Table SD_PARAMS for join relationship
-CREATE TABLE SD_PARAMS
-(
-    SD_ID NUMBER NOT NULL,
-    PARAM_KEY VARCHAR2(256) NOT NULL,
-    PARAM_VALUE VARCHAR2(4000) NULL
-);
-
-ALTER TABLE SD_PARAMS ADD CONSTRAINT SD_PARAMS_PK PRIMARY KEY (SD_ID,PARAM_KEY);
-
--- Table GLOBAL_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege]
-CREATE TABLE GLOBAL_PRIVS
-(
-    USER_GRANT_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    USER_PRIV VARCHAR2(128) NULL
-);
-
-ALTER TABLE GLOBAL_PRIVS ADD CONSTRAINT GLOBAL_PRIVS_PK PRIMARY KEY (USER_GRANT_ID);
-
--- Table SDS for classes [org.apache.hadoop.hive.metastore.model.MStorageDescriptor]
-CREATE TABLE SDS
-(
-    SD_ID NUMBER NOT NULL,
-    CD_ID NUMBER NULL,
-    INPUT_FORMAT VARCHAR2(4000) NULL,
-    IS_COMPRESSED NUMBER(1) NOT NULL CHECK (IS_COMPRESSED IN (1,0)),
-    LOCATION VARCHAR2(4000) NULL,
-    NUM_BUCKETS NUMBER (10) NOT NULL,
-    OUTPUT_FORMAT VARCHAR2(4000) NULL,
-    SERDE_ID NUMBER NULL,
-    IS_STOREDASSUBDIRECTORIES NUMBER(1) NOT NULL CHECK (IS_STOREDASSUBDIRECTORIES IN (1,0))
-);
-
-ALTER TABLE SDS ADD CONSTRAINT SDS_PK PRIMARY KEY (SD_ID);
-
--- Table TABLE_PARAMS for join relationship
-CREATE TABLE TABLE_PARAMS
-(
-    TBL_ID NUMBER NOT NULL,
-    PARAM_KEY VARCHAR2(256) NOT NULL,
-    PARAM_VALUE VARCHAR2(4000) NULL
-);
-
-ALTER TABLE TABLE_PARAMS ADD CONSTRAINT TABLE_PARAMS_PK PRIMARY KEY (TBL_ID,PARAM_KEY);
-
--- Table SORT_COLS for join relationship
-CREATE TABLE SORT_COLS
-(
-    SD_ID NUMBER NOT NULL,
-    "COLUMN_NAME" VARCHAR2(128) NULL,
-    "ORDER" NUMBER (10) NOT NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE SORT_COLS ADD CONSTRAINT SORT_COLS_PK PRIMARY KEY (SD_ID,INTEGER_IDX);
-
--- Table TBL_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MTablePrivilege]
-CREATE TABLE TBL_PRIVS
-(
-    TBL_GRANT_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    TBL_PRIV VARCHAR2(128) NULL,
-    TBL_ID NUMBER NULL
-);
-
-ALTER TABLE TBL_PRIVS ADD CONSTRAINT TBL_PRIVS_PK PRIMARY KEY (TBL_GRANT_ID);
-
--- Table DATABASE_PARAMS for join relationship
-CREATE TABLE DATABASE_PARAMS
-(
-    DB_ID NUMBER NOT NULL,
-    PARAM_KEY VARCHAR2(180) NOT NULL,
-    PARAM_VALUE VARCHAR2(4000) NULL
-);
-
-ALTER TABLE DATABASE_PARAMS ADD CONSTRAINT DATABASE_PARAMS_PK PRIMARY KEY (DB_ID,PARAM_KEY);
-
--- Table ROLE_MAP for classes [org.apache.hadoop.hive.metastore.model.MRoleMap]
-CREATE TABLE ROLE_MAP
-(
-    ROLE_GRANT_ID NUMBER NOT NULL,
-    ADD_TIME NUMBER (10) NOT NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    ROLE_ID NUMBER NULL
-);
-
-ALTER TABLE ROLE_MAP ADD CONSTRAINT ROLE_MAP_PK PRIMARY KEY (ROLE_GRANT_ID);
-
--- Table SERDE_PARAMS for join relationship
-CREATE TABLE SERDE_PARAMS
-(
-    SERDE_ID NUMBER NOT NULL,
-    PARAM_KEY VARCHAR2(256) NOT NULL,
-    PARAM_VALUE VARCHAR2(4000) NULL
-);
-
-ALTER TABLE SERDE_PARAMS ADD CONSTRAINT SERDE_PARAMS_PK PRIMARY KEY (SERDE_ID,PARAM_KEY);
-
--- Table PART_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MPartitionPrivilege]
-CREATE TABLE PART_PRIVS
-(
-    PART_GRANT_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PART_ID NUMBER NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    PART_PRIV VARCHAR2(128) NULL
-);
-
-ALTER TABLE PART_PRIVS ADD CONSTRAINT PART_PRIVS_PK PRIMARY KEY (PART_GRANT_ID);
-
--- Table DB_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MDBPrivilege]
-CREATE TABLE DB_PRIVS
-(
-    DB_GRANT_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    DB_ID NUMBER NULL,
-    GRANT_OPTION NUMBER (5) NOT NULL,
-    GRANTOR VARCHAR2(128) NULL,
-    GRANTOR_TYPE VARCHAR2(128) NULL,
-    PRINCIPAL_NAME VARCHAR2(128) NULL,
-    PRINCIPAL_TYPE VARCHAR2(128) NULL,
-    DB_PRIV VARCHAR2(128) NULL
-);
-
-ALTER TABLE DB_PRIVS ADD CONSTRAINT DB_PRIVS_PK PRIMARY KEY (DB_GRANT_ID);
-
--- Table TBLS for classes [org.apache.hadoop.hive.metastore.model.MTable]
-CREATE TABLE TBLS
-(
-    TBL_ID NUMBER NOT NULL,
-    CREATE_TIME NUMBER (10) NOT NULL,
-    DB_ID NUMBER NULL,
-    LAST_ACCESS_TIME NUMBER (10) NOT NULL,
-    OWNER VARCHAR2(767) NULL,
-    RETENTION NUMBER (10) NOT NULL,
-    SD_ID NUMBER NULL,
-    TBL_NAME VARCHAR2(128) NULL,
-    TBL_TYPE VARCHAR2(128) NULL,
-    VIEW_EXPANDED_TEXT CLOB NULL,
-    VIEW_ORIGINAL_TEXT CLOB NULL
-);
-
-ALTER TABLE TBLS ADD CONSTRAINT TBLS_PK PRIMARY KEY (TBL_ID);
-
--- Table PARTITION_EVENTS for classes [org.apache.hadoop.hive.metastore.model.MPartitionEvent]
-CREATE TABLE PARTITION_EVENTS
-(
-    PART_NAME_ID NUMBER NOT NULL,
-    DB_NAME VARCHAR2(128) NULL,
-    EVENT_TIME NUMBER NOT NULL,
-    EVENT_TYPE NUMBER (10) NOT NULL,
-    PARTITION_NAME VARCHAR2(767) NULL,
-    TBL_NAME VARCHAR2(128) NULL
-);
-
-ALTER TABLE PARTITION_EVENTS ADD CONSTRAINT PARTITION_EVENTS_PK PRIMARY KEY (PART_NAME_ID);
-
--- Table SKEWED_STRING_LIST for classes [org.apache.hadoop.hive.metastore.model.MStringList]
-CREATE TABLE SKEWED_STRING_LIST
-(
-    STRING_LIST_ID NUMBER NOT NULL
-);
-
-ALTER TABLE SKEWED_STRING_LIST ADD CONSTRAINT SKEWED_STRING_LIST_PK PRIMARY KEY (STRING_LIST_ID);
-
-CREATE TABLE SKEWED_STRING_LIST_VALUES
-(
-    STRING_LIST_ID NUMBER NOT NULL,
-    "STRING_LIST_VALUE" VARCHAR2(256) NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE SKEWED_STRING_LIST_VALUES ADD CONSTRAINT SKEWED_STRING_LIST_VALUES_PK PRIMARY KEY (STRING_LIST_ID,INTEGER_IDX);
-
-ALTER TABLE SKEWED_STRING_LIST_VALUES ADD CONSTRAINT SKEWED_STRING_LIST_VALUES_FK1 FOREIGN KEY (STRING_LIST_ID) REFERENCES SKEWED_STRING_LIST (STRING_LIST_ID) INITIALLY DEFERRED ;
-
-CREATE TABLE SKEWED_COL_NAMES
-(
-    SD_ID NUMBER NOT NULL,
-    "SKEWED_COL_NAME" VARCHAR2(256) NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE SKEWED_COL_NAMES ADD CONSTRAINT SKEWED_COL_NAMES_PK PRIMARY KEY (SD_ID,INTEGER_IDX);
-
-ALTER TABLE SKEWED_COL_NAMES ADD CONSTRAINT SKEWED_COL_NAMES_FK1 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE TABLE SKEWED_COL_VALUE_LOC_MAP
-(
-    SD_ID NUMBER NOT NULL,
-    STRING_LIST_ID_KID NUMBER NOT NULL,
-    "LOCATION" VARCHAR2(4000) NULL
-);
-
-CREATE TABLE MASTER_KEYS
-(
-    KEY_ID NUMBER (10) NOT NULL,
-    MASTER_KEY VARCHAR2(767) NULL
-);
-
-CREATE TABLE DELEGATION_TOKENS
-(
-    TOKEN_IDENT VARCHAR2(767) NOT NULL,
-    TOKEN VARCHAR2(767) NULL
-);
-
-ALTER TABLE SKEWED_COL_VALUE_LOC_MAP ADD CONSTRAINT SKEWED_COL_VALUE_LOC_MAP_PK PRIMARY KEY (SD_ID,STRING_LIST_ID_KID);
-
-ALTER TABLE SKEWED_COL_VALUE_LOC_MAP ADD CONSTRAINT SKEWED_COL_VALUE_LOC_MAP_FK1 FOREIGN KEY (STRING_LIST_ID_KID) REFERENCES SKEWED_STRING_LIST (STRING_LIST_ID) INITIALLY DEFERRED ;
-
-ALTER TABLE SKEWED_COL_VALUE_LOC_MAP ADD CONSTRAINT SKEWED_COL_VALUE_LOC_MAP_FK2 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE TABLE SKEWED_VALUES
-(
-    SD_ID_OID NUMBER NOT NULL,
-    STRING_LIST_ID_EID NUMBER NOT NULL,
-    INTEGER_IDX NUMBER(10) NOT NULL
-);
-
-ALTER TABLE SKEWED_VALUES ADD CONSTRAINT SKEWED_VALUES_PK PRIMARY KEY (SD_ID_OID,INTEGER_IDX);
-
-ALTER TABLE SKEWED_VALUES ADD CONSTRAINT SKEWED_VALUES_FK1 FOREIGN KEY (STRING_LIST_ID_EID) REFERENCES SKEWED_STRING_LIST (STRING_LIST_ID) INITIALLY DEFERRED ;
-
-ALTER TABLE SKEWED_VALUES ADD CONSTRAINT SKEWED_VALUES_FK2 FOREIGN KEY (SD_ID_OID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
--- column statistics
-
-CREATE TABLE TAB_COL_STATS (
- CS_ID NUMBER NOT NULL,
- DB_NAME VARCHAR2(128) NOT NULL,
- TABLE_NAME VARCHAR2(128) NOT NULL,
- COLUMN_NAME VARCHAR2(128) NOT NULL,
- COLUMN_TYPE VARCHAR2(128) NOT NULL,
- TBL_ID NUMBER NOT NULL,
- LONG_LOW_VALUE NUMBER,
- LONG_HIGH_VALUE NUMBER,
- DOUBLE_LOW_VALUE NUMBER,
- DOUBLE_HIGH_VALUE NUMBER,
- BIG_DECIMAL_LOW_VALUE VARCHAR2(4000),
- BIG_DECIMAL_HIGH_VALUE VARCHAR2(4000),
- NUM_NULLS NUMBER NOT NULL,
- NUM_DISTINCTS NUMBER,
- AVG_COL_LEN NUMBER,
- MAX_COL_LEN NUMBER,
- NUM_TRUES NUMBER,
- NUM_FALSES NUMBER,
- LAST_ANALYZED NUMBER NOT NULL
-);
-
-CREATE TABLE VERSION (
-  VER_ID NUMBER NOT NULL,
-  SCHEMA_VERSION VARCHAR(127) NOT NULL,
-  VERSION_COMMENT VARCHAR(255)
-);
-ALTER TABLE VERSION ADD CONSTRAINT VERSION_PK PRIMARY KEY (VER_ID);
-
-ALTER TABLE TAB_COL_STATS ADD CONSTRAINT TAB_COL_STATS_PKEY PRIMARY KEY (CS_ID);
-
-ALTER TABLE TAB_COL_STATS ADD CONSTRAINT TAB_COL_STATS_FK FOREIGN KEY (TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX TAB_COL_STATS_N49 ON TAB_COL_STATS(TBL_ID);
-
-CREATE TABLE PART_COL_STATS (
- CS_ID NUMBER NOT NULL,
- DB_NAME VARCHAR2(128) NOT NULL,
- TABLE_NAME VARCHAR2(128) NOT NULL,
- PARTITION_NAME VARCHAR2(767) NOT NULL,
- COLUMN_NAME VARCHAR2(128) NOT NULL,
- COLUMN_TYPE VARCHAR2(128) NOT NULL,
- PART_ID NUMBER NOT NULL,
- LONG_LOW_VALUE NUMBER,
- LONG_HIGH_VALUE NUMBER,
- DOUBLE_LOW_VALUE NUMBER,
- DOUBLE_HIGH_VALUE NUMBER,
- BIG_DECIMAL_LOW_VALUE VARCHAR2(4000),
- BIG_DECIMAL_HIGH_VALUE VARCHAR2(4000),
- NUM_NULLS NUMBER NOT NULL,
- NUM_DISTINCTS NUMBER,
- AVG_COL_LEN NUMBER,
- MAX_COL_LEN NUMBER,
- NUM_TRUES NUMBER,
- NUM_FALSES NUMBER,
- LAST_ANALYZED NUMBER NOT NULL
-);
-
-ALTER TABLE PART_COL_STATS ADD CONSTRAINT PART_COL_STATS_PKEY PRIMARY KEY (CS_ID);
-
-ALTER TABLE PART_COL_STATS ADD CONSTRAINT PART_COL_STATS_FK FOREIGN KEY (PART_ID) REFERENCES PARTITIONS (PART_ID) INITIALLY DEFERRED;
-
-CREATE INDEX PART_COL_STATS_N49 ON PART_COL_STATS (PART_ID);
-
--- Constraints for table PART_COL_PRIVS for class(es) [org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege]
-ALTER TABLE PART_COL_PRIVS ADD CONSTRAINT PART_COL_PRIVS_FK1 FOREIGN KEY (PART_ID) REFERENCES PARTITIONS (PART_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX PART_COL_PRIVS_N49 ON PART_COL_PRIVS (PART_ID);
-
-CREATE INDEX PARTITIONCOLUMNPRIVILEGEINDEX ON PART_COL_PRIVS (PART_ID,"COLUMN_NAME",PRINCIPAL_NAME,PRINCIPAL_TYPE,PART_COL_PRIV,GRANTOR,GRANTOR_TYPE);
-
-
--- Constraints for table COLUMNS_V2
-ALTER TABLE COLUMNS_V2 ADD CONSTRAINT COLUMNS_V2_FK1 FOREIGN KEY (CD_ID) REFERENCES CDS (CD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX COLUMNS_V2_N49 ON COLUMNS_V2 (CD_ID);
-
-
--- Constraints for table PARTITION_KEY_VALS
-ALTER TABLE PARTITION_KEY_VALS ADD CONSTRAINT PARTITION_KEY_VALS_FK1 FOREIGN KEY (PART_ID) REFERENCES PARTITIONS (PART_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX PARTITION_KEY_VALS_N49 ON PARTITION_KEY_VALS (PART_ID);
-
-
--- Constraints for table DBS for class(es) [org.apache.hadoop.hive.metastore.model.MDatabase]
-CREATE UNIQUE INDEX UNIQUE_DATABASE ON DBS ("NAME");
-
-
--- Constraints for table PARTITION_PARAMS
-ALTER TABLE PARTITION_PARAMS ADD CONSTRAINT PARTITION_PARAMS_FK1 FOREIGN KEY (PART_ID) REFERENCES PARTITIONS (PART_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX PARTITION_PARAMS_N49 ON PARTITION_PARAMS (PART_ID);
-
-
--- Constraints for table SERDES for class(es) [org.apache.hadoop.hive.metastore.model.MSerDeInfo]
-
--- Constraints for table TYPES for class(es) [org.apache.hadoop.hive.metastore.model.MType]
-CREATE UNIQUE INDEX UNIQUE_TYPE ON TYPES (TYPE_NAME);
-
-
--- Constraints for table PARTITION_KEYS
-ALTER TABLE PARTITION_KEYS ADD CONSTRAINT PARTITION_KEYS_FK1 FOREIGN KEY (TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX PARTITION_KEYS_N49 ON PARTITION_KEYS (TBL_ID);
-
-
--- Constraints for table ROLES for class(es) [org.apache.hadoop.hive.metastore.model.MRole]
-CREATE UNIQUE INDEX ROLEENTITYINDEX ON ROLES (ROLE_NAME);
-
-
--- Constraints for table PARTITIONS for class(es) [org.apache.hadoop.hive.metastore.model.MPartition]
-ALTER TABLE PARTITIONS ADD CONSTRAINT PARTITIONS_FK1 FOREIGN KEY (TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-ALTER TABLE PARTITIONS ADD CONSTRAINT PARTITIONS_FK2 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX PARTITIONS_N49 ON PARTITIONS (SD_ID);
-
-CREATE INDEX PARTITIONS_N50 ON PARTITIONS (TBL_ID);
-
-CREATE UNIQUE INDEX UNIQUEPARTITION ON PARTITIONS (PART_NAME,TBL_ID);
-
-
--- Constraints for table INDEX_PARAMS
-ALTER TABLE INDEX_PARAMS ADD CONSTRAINT INDEX_PARAMS_FK1 FOREIGN KEY (INDEX_ID) REFERENCES IDXS (INDEX_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX INDEX_PARAMS_N49 ON INDEX_PARAMS (INDEX_ID);
-
-
--- Constraints for table TBL_COL_PRIVS for class(es) [org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege]
-ALTER TABLE TBL_COL_PRIVS ADD CONSTRAINT TBL_COL_PRIVS_FK1 FOREIGN KEY (TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX TABLECOLUMNPRIVILEGEINDEX ON TBL_COL_PRIVS (TBL_ID,"COLUMN_NAME",PRINCIPAL_NAME,PRINCIPAL_TYPE,TBL_COL_PRIV,GRANTOR,GRANTOR_TYPE);
-
-CREATE INDEX TBL_COL_PRIVS_N49 ON TBL_COL_PRIVS (TBL_ID);
-
-
--- Constraints for table IDXS for class(es) [org.apache.hadoop.hive.metastore.model.MIndex]
-ALTER TABLE IDXS ADD CONSTRAINT IDXS_FK2 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-ALTER TABLE IDXS ADD CONSTRAINT IDXS_FK1 FOREIGN KEY (ORIG_TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-ALTER TABLE IDXS ADD CONSTRAINT IDXS_FK3 FOREIGN KEY (INDEX_TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-CREATE UNIQUE INDEX UNIQUEINDEX ON IDXS (INDEX_NAME,ORIG_TBL_ID);
-
-CREATE INDEX IDXS_N50 ON IDXS (INDEX_TBL_ID);
-
-CREATE INDEX IDXS_N51 ON IDXS (SD_ID);
-
-CREATE INDEX IDXS_N49 ON IDXS (ORIG_TBL_ID);
-
-
--- Constraints for table BUCKETING_COLS
-ALTER TABLE BUCKETING_COLS ADD CONSTRAINT BUCKETING_COLS_FK1 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX BUCKETING_COLS_N49 ON BUCKETING_COLS (SD_ID);
-
-
--- Constraints for table TYPE_FIELDS
-ALTER TABLE TYPE_FIELDS ADD CONSTRAINT TYPE_FIELDS_FK1 FOREIGN KEY (TYPE_NAME) REFERENCES TYPES (TYPES_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX TYPE_FIELDS_N49 ON TYPE_FIELDS (TYPE_NAME);
-
-
--- Constraints for table SD_PARAMS
-ALTER TABLE SD_PARAMS ADD CONSTRAINT SD_PARAMS_FK1 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX SD_PARAMS_N49 ON SD_PARAMS (SD_ID);
-
-
--- Constraints for table GLOBAL_PRIVS for class(es) [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege]
-CREATE UNIQUE INDEX GLOBALPRIVILEGEINDEX ON GLOBAL_PRIVS (PRINCIPAL_NAME,PRINCIPAL_TYPE,USER_PRIV,GRANTOR,GRANTOR_TYPE);
-
-
--- Constraints for table SDS for class(es) [org.apache.hadoop.hive.metastore.model.MStorageDescriptor]
-ALTER TABLE SDS ADD CONSTRAINT SDS_FK1 FOREIGN KEY (SERDE_ID) REFERENCES SERDES (SERDE_ID) INITIALLY DEFERRED ;
-ALTER TABLE SDS ADD CONSTRAINT SDS_FK2 FOREIGN KEY (CD_ID) REFERENCES CDS (CD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX SDS_N49 ON SDS (SERDE_ID);
-CREATE INDEX SDS_N50 ON SDS (CD_ID);
-
-
--- Constraints for table TABLE_PARAMS
-ALTER TABLE TABLE_PARAMS ADD CONSTRAINT TABLE_PARAMS_FK1 FOREIGN KEY (TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX TABLE_PARAMS_N49 ON TABLE_PARAMS (TBL_ID);
-
-
--- Constraints for table SORT_COLS
-ALTER TABLE SORT_COLS ADD CONSTRAINT SORT_COLS_FK1 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX SORT_COLS_N49 ON SORT_COLS (SD_ID);
-
-
--- Constraints for table TBL_PRIVS for class(es) [org.apache.hadoop.hive.metastore.model.MTablePrivilege]
-ALTER TABLE TBL_PRIVS ADD CONSTRAINT TBL_PRIVS_FK1 FOREIGN KEY (TBL_ID) REFERENCES TBLS (TBL_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX TBL_PRIVS_N49 ON TBL_PRIVS (TBL_ID);
-
-CREATE INDEX TABLEPRIVILEGEINDEX ON TBL_PRIVS (TBL_ID,PRINCIPAL_NAME,PRINCIPAL_TYPE,TBL_PRIV,GRANTOR,GRANTOR_TYPE);
-
-
--- Constraints for table DATABASE_PARAMS
-ALTER TABLE DATABASE_PARAMS ADD CONSTRAINT DATABASE_PARAMS_FK1 FOREIGN KEY (DB_ID) REFERENCES DBS (DB_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX DATABASE_PARAMS_N49 ON DATABASE_PARAMS (DB_ID);
-
-
--- Constraints for table ROLE_MAP for class(es) [org.apache.hadoop.hive.metastore.model.MRoleMap]
-ALTER TABLE ROLE_MAP ADD CONSTRAINT ROLE_MAP_FK1 FOREIGN KEY (ROLE_ID) REFERENCES ROLES (ROLE_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX ROLE_MAP_N49 ON ROLE_MAP (ROLE_ID);
-
-CREATE UNIQUE INDEX USERROLEMAPINDEX ON ROLE_MAP (PRINCIPAL_NAME,ROLE_ID,GRANTOR,GRANTOR_TYPE);
-
-
--- Constraints for table SERDE_PARAMS
-ALTER TABLE SERDE_PARAMS ADD CONSTRAINT SERDE_PARAMS_FK1 FOREIGN KEY (SERDE_ID) REFERENCES SERDES (SERDE_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX SERDE_PARAMS_N49 ON SERDE_PARAMS (SERDE_ID);
-
-
--- Constraints for table PART_PRIVS for class(es) [org.apache.hadoop.hive.metastore.model.MPartitionPrivilege]
-ALTER TABLE PART_PRIVS ADD CONSTRAINT PART_PRIVS_FK1 FOREIGN KEY (PART_ID) REFERENCES PARTITIONS (PART_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX PARTPRIVILEGEINDEX ON PART_PRIVS (PART_ID,PRINCIPAL_NAME,PRINCIPAL_TYPE,PART_PRIV,GRANTOR,GRANTOR_TYPE);
-
-CREATE INDEX PART_PRIVS_N49 ON PART_PRIVS (PART_ID);
-
-
--- Constraints for table DB_PRIVS for class(es) [org.apache.hadoop.hive.metastore.model.MDBPrivilege]
-ALTER TABLE DB_PRIVS ADD CONSTRAINT DB_PRIVS_FK1 FOREIGN KEY (DB_ID) REFERENCES DBS (DB_ID) INITIALLY DEFERRED ;
-
-CREATE UNIQUE INDEX DBPRIVILEGEINDEX ON DB_PRIVS (DB_ID,PRINCIPAL_NAME,PRINCIPAL_TYPE,DB_PRIV,GRANTOR,GRANTOR_TYPE);
-
-CREATE INDEX DB_PRIVS_N49 ON DB_PRIVS (DB_ID);
-
-
--- Constraints for table TBLS for class(es) [org.apache.hadoop.hive.metastore.model.MTable]
-ALTER TABLE TBLS ADD CONSTRAINT TBLS_FK2 FOREIGN KEY (DB_ID) REFERENCES DBS (DB_ID) INITIALLY DEFERRED ;
-
-ALTER TABLE TBLS ADD CONSTRAINT TBLS_FK1 FOREIGN KEY (SD_ID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
-
-CREATE INDEX TBLS_N49 ON TBLS (DB_ID);
-
-CREATE UNIQUE INDEX UNIQUETABLE ON TBLS (TBL_NAME,DB_ID);
-
-CREATE INDEX TBLS_N50 ON TBLS (SD_ID);
-
-
--- Constraints for table PARTITION_EVENTS for class(es) [org.apache.hadoop.hive.metastore.model.MPartitionEvent]
-CREATE INDEX PARTITIONEVENTINDEX ON PARTITION_EVENTS (PARTITION_NAME);
-
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '0.12.0', 'Hive release version 0.12.0');
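
The dump ends by seeding the VERSION table; Hive's `schematool -info` (used by hive.py later in this commit) reads that row to confirm the installed metastore schema matches the Hive release. A minimal sketch of an equivalent check, assuming an already-open DB-API cursor (the connection setup is left out and is an assumption):

    def metastore_schema_version(cursor):
        # Read the row seeded by the INSERT above; schematool -info performs
        # an equivalent lookup to validate the installed schema.
        cursor.execute("SELECT SCHEMA_VERSION FROM VERSION WHERE VER_ID = 1")
        row = cursor.fetchone()
        return row[0] if row else None  # expected '0.12.0' for this schema
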

+ 0 - 1405
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.postgres.sql

@@ -1,1405 +0,0 @@
---
--- PostgreSQL database dump
---
-
-SET statement_timeout = 0;
-SET client_encoding = 'UTF8';
-SET standard_conforming_strings = off;
-SET check_function_bodies = false;
-SET client_min_messages = warning;
-SET escape_string_warning = off;
-
-SET search_path = public, pg_catalog;
-
-SET default_tablespace = '';
-
-SET default_with_oids = false;
-
---
--- Name: BUCKETING_COLS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "BUCKETING_COLS" (
-    "SD_ID" bigint NOT NULL,
-    "BUCKET_COL_NAME" character varying(256) DEFAULT NULL::character varying,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-
---
--- Name: CDS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "CDS" (
-    "CD_ID" bigint NOT NULL
-);
-
-
---
--- Name: COLUMNS_OLD; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "COLUMNS_OLD" (
-    "SD_ID" bigint NOT NULL,
-    "COMMENT" character varying(256) DEFAULT NULL::character varying,
-    "COLUMN_NAME" character varying(128) NOT NULL,
-    "TYPE_NAME" character varying(4000) NOT NULL,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-
---
--- Name: COLUMNS_V2; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "COLUMNS_V2" (
-    "CD_ID" bigint NOT NULL,
-    "COMMENT" character varying(4000),
-    "COLUMN_NAME" character varying(128) NOT NULL,
-    "TYPE_NAME" character varying(4000),
-    "INTEGER_IDX" integer NOT NULL
-);
-
-
---
--- Name: DATABASE_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "DATABASE_PARAMS" (
-    "DB_ID" bigint NOT NULL,
-    "PARAM_KEY" character varying(180) NOT NULL,
-    "PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: DBS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "DBS" (
-    "DB_ID" bigint NOT NULL,
-    "DESC" character varying(4000) DEFAULT NULL::character varying,
-    "DB_LOCATION_URI" character varying(4000) NOT NULL,
-    "NAME" character varying(128) DEFAULT NULL::character varying
-);
-
-
---
--- Name: DB_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "DB_PRIVS" (
-    "DB_GRANT_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "DB_ID" bigint,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "DB_PRIV" character varying(128) DEFAULT NULL::character varying
-);
-
-
---
--- Name: GLOBAL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "GLOBAL_PRIVS" (
-    "USER_GRANT_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "USER_PRIV" character varying(128) DEFAULT NULL::character varying
-);
-
-
---
--- Name: IDXS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "IDXS" (
-    "INDEX_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "DEFERRED_REBUILD" boolean NOT NULL,
-    "INDEX_HANDLER_CLASS" character varying(4000) DEFAULT NULL::character varying,
-    "INDEX_NAME" character varying(128) DEFAULT NULL::character varying,
-    "INDEX_TBL_ID" bigint,
-    "LAST_ACCESS_TIME" bigint NOT NULL,
-    "ORIG_TBL_ID" bigint,
-    "SD_ID" bigint
-);
-
-
---
--- Name: INDEX_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "INDEX_PARAMS" (
-    "INDEX_ID" bigint NOT NULL,
-    "PARAM_KEY" character varying(256) NOT NULL,
-    "PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: NUCLEUS_TABLES; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "NUCLEUS_TABLES" (
-    "CLASS_NAME" character varying(128) NOT NULL,
-    "TABLE_NAME" character varying(128) NOT NULL,
-    "TYPE" character varying(4) NOT NULL,
-    "OWNER" character varying(2) NOT NULL,
-    "VERSION" character varying(20) NOT NULL,
-    "INTERFACE_NAME" character varying(255) DEFAULT NULL::character varying
-);
-
-
---
--- Name: PARTITIONS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PARTITIONS" (
-    "PART_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "LAST_ACCESS_TIME" bigint NOT NULL,
-    "PART_NAME" character varying(767) DEFAULT NULL::character varying,
-    "SD_ID" bigint,
-    "TBL_ID" bigint
-);
-
-
---
--- Name: PARTITION_EVENTS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PARTITION_EVENTS" (
-    "PART_NAME_ID" bigint NOT NULL,
-    "DB_NAME" character varying(128),
-    "EVENT_TIME" bigint NOT NULL,
-    "EVENT_TYPE" integer NOT NULL,
-    "PARTITION_NAME" character varying(767),
-    "TBL_NAME" character varying(128)
-);
-
-
---
--- Name: PARTITION_KEYS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PARTITION_KEYS" (
-    "TBL_ID" bigint NOT NULL,
-    "PKEY_COMMENT" character varying(4000) DEFAULT NULL::character varying,
-    "PKEY_NAME" character varying(128) NOT NULL,
-    "PKEY_TYPE" character varying(767) NOT NULL,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-
---
--- Name: PARTITION_KEY_VALS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PARTITION_KEY_VALS" (
-    "PART_ID" bigint NOT NULL,
-    "PART_KEY_VAL" character varying(256) DEFAULT NULL::character varying,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-
---
--- Name: PARTITION_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PARTITION_PARAMS" (
-    "PART_ID" bigint NOT NULL,
-    "PARAM_KEY" character varying(256) NOT NULL,
-    "PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: PART_COL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PART_COL_PRIVS" (
-    "PART_COLUMN_GRANT_ID" bigint NOT NULL,
-    "COLUMN_NAME" character varying(128) DEFAULT NULL::character varying,
-    "CREATE_TIME" bigint NOT NULL,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PART_ID" bigint,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PART_COL_PRIV" character varying(128) DEFAULT NULL::character varying
-);
-
-
---
--- Name: PART_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PART_PRIVS" (
-    "PART_GRANT_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PART_ID" bigint,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PART_PRIV" character varying(128) DEFAULT NULL::character varying
-);
-
-
---
--- Name: ROLES; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "ROLES" (
-    "ROLE_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "OWNER_NAME" character varying(128) DEFAULT NULL::character varying,
-    "ROLE_NAME" character varying(128) DEFAULT NULL::character varying
-);
-
-
---
--- Name: ROLE_MAP; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "ROLE_MAP" (
-    "ROLE_GRANT_ID" bigint NOT NULL,
-    "ADD_TIME" bigint NOT NULL,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "ROLE_ID" bigint
-);
-
-
---
--- Name: SDS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "SDS" (
-    "SD_ID" bigint NOT NULL,
-    "INPUT_FORMAT" character varying(4000) DEFAULT NULL::character varying,
-    "IS_COMPRESSED" boolean NOT NULL,
-    "LOCATION" character varying(4000) DEFAULT NULL::character varying,
-    "NUM_BUCKETS" bigint NOT NULL,
-    "OUTPUT_FORMAT" character varying(4000) DEFAULT NULL::character varying,
-    "SERDE_ID" bigint,
-    "CD_ID" bigint,
-    "IS_STOREDASSUBDIRECTORIES" boolean NOT NULL
-);
-
-
---
--- Name: SD_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "SD_PARAMS" (
-    "SD_ID" bigint NOT NULL,
-    "PARAM_KEY" character varying(256) NOT NULL,
-    "PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: SEQUENCE_TABLE; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "SEQUENCE_TABLE" (
-    "SEQUENCE_NAME" character varying(255) NOT NULL,
-    "NEXT_VAL" bigint NOT NULL
-);
-
-
---
--- Name: SERDES; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "SERDES" (
-    "SERDE_ID" bigint NOT NULL,
-    "NAME" character varying(128) DEFAULT NULL::character varying,
-    "SLIB" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: SERDE_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "SERDE_PARAMS" (
-    "SERDE_ID" bigint NOT NULL,
-    "PARAM_KEY" character varying(256) NOT NULL,
-    "PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: SORT_COLS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "SORT_COLS" (
-    "SD_ID" bigint NOT NULL,
-    "COLUMN_NAME" character varying(128) DEFAULT NULL::character varying,
-    "ORDER" bigint NOT NULL,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-
---
--- Name: TABLE_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "TABLE_PARAMS" (
-    "TBL_ID" bigint NOT NULL,
-    "PARAM_KEY" character varying(256) NOT NULL,
-    "PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
-);
-
-
---
--- Name: TBLS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "TBLS" (
-    "TBL_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "DB_ID" bigint,
-    "LAST_ACCESS_TIME" bigint NOT NULL,
-    "OWNER" character varying(767) DEFAULT NULL::character varying,
-    "RETENTION" bigint NOT NULL,
-    "SD_ID" bigint,
-    "TBL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "TBL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "VIEW_EXPANDED_TEXT" text,
-    "VIEW_ORIGINAL_TEXT" text
-);
-
-
---
--- Name: TBL_COL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "TBL_COL_PRIVS" (
-    "TBL_COLUMN_GRANT_ID" bigint NOT NULL,
-    "COLUMN_NAME" character varying(128) DEFAULT NULL::character varying,
-    "CREATE_TIME" bigint NOT NULL,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "TBL_COL_PRIV" character varying(128) DEFAULT NULL::character varying,
-    "TBL_ID" bigint
-);
-
-
---
--- Name: TBL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "TBL_PRIVS" (
-    "TBL_GRANT_ID" bigint NOT NULL,
-    "CREATE_TIME" bigint NOT NULL,
-    "GRANT_OPTION" smallint NOT NULL,
-    "GRANTOR" character varying(128) DEFAULT NULL::character varying,
-    "GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
-    "PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
-    "TBL_PRIV" character varying(128) DEFAULT NULL::character varying,
-    "TBL_ID" bigint
-);
-
-
---
--- Name: TYPES; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "TYPES" (
-    "TYPES_ID" bigint NOT NULL,
-    "TYPE_NAME" character varying(128) DEFAULT NULL::character varying,
-    "TYPE1" character varying(767) DEFAULT NULL::character varying,
-    "TYPE2" character varying(767) DEFAULT NULL::character varying
-);
-
-
---
--- Name: TYPE_FIELDS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "TYPE_FIELDS" (
-    "TYPE_NAME" bigint NOT NULL,
-    "COMMENT" character varying(256) DEFAULT NULL::character varying,
-    "FIELD_NAME" character varying(128) NOT NULL,
-    "FIELD_TYPE" character varying(767) NOT NULL,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-CREATE TABLE "SKEWED_STRING_LIST" (
-    "STRING_LIST_ID" bigint NOT NULL
-);
-
-CREATE TABLE "SKEWED_STRING_LIST_VALUES" (
-    "STRING_LIST_ID" bigint NOT NULL,
-    "STRING_LIST_VALUE" character varying(256) DEFAULT NULL::character varying,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-CREATE TABLE "SKEWED_COL_NAMES" (
-    "SD_ID" bigint NOT NULL,
-    "SKEWED_COL_NAME" character varying(256) DEFAULT NULL::character varying,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-CREATE TABLE "SKEWED_COL_VALUE_LOC_MAP" (
-    "SD_ID" bigint NOT NULL,
-    "STRING_LIST_ID_KID" bigint NOT NULL,
-    "LOCATION" character varying(4000) DEFAULT NULL::character varying
-);
-
-CREATE TABLE "SKEWED_VALUES" (
-    "SD_ID_OID" bigint NOT NULL,
-    "STRING_LIST_ID_EID" bigint NOT NULL,
-    "INTEGER_IDX" bigint NOT NULL
-);
-
-
---
--- Name: TAB_COL_STATS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE  "MASTER_KEYS"
-(
-    "KEY_ID" SERIAL,
-    "MASTER_KEY" varchar(767) NULL,
-    PRIMARY KEY ("KEY_ID")
-);
-
-CREATE TABLE  "DELEGATION_TOKENS"
-(
-    "TOKEN_IDENT" varchar(767) NOT NULL,
-    "TOKEN" varchar(767) NULL,
-    PRIMARY KEY ("TOKEN_IDENT")
-);
-
-CREATE TABLE "TAB_COL_STATS" (
- "CS_ID" bigint NOT NULL,
- "DB_NAME" character varying(128) DEFAULT NULL::character varying,
- "TABLE_NAME" character varying(128) DEFAULT NULL::character varying,
- "COLUMN_NAME" character varying(128) DEFAULT NULL::character varying,
- "COLUMN_TYPE" character varying(128) DEFAULT NULL::character varying,
- "TBL_ID" bigint NOT NULL,
- "LONG_LOW_VALUE" bigint,
- "LONG_HIGH_VALUE" bigint,
- "DOUBLE_LOW_VALUE" double precision,
- "DOUBLE_HIGH_VALUE" double precision,
- "BIG_DECIMAL_LOW_VALUE" character varying(4000) DEFAULT NULL::character varying,
- "BIG_DECIMAL_HIGH_VALUE" character varying(4000) DEFAULT NULL::character varying,
- "NUM_NULLS" bigint NOT NULL,
- "NUM_DISTINCTS" bigint,
- "AVG_COL_LEN" double precision,
- "MAX_COL_LEN" bigint,
- "NUM_TRUES" bigint,
- "NUM_FALSES" bigint,
- "LAST_ANALYZED" bigint NOT NULL
-);
-
---
--- Table structure for VERSION
---
-CREATE TABLE "VERSION" (
-  "VER_ID" bigint,
-  "SCHEMA_VERSION" character varying(127) NOT NULL,
-  "VERSION_COMMENT" character varying(255) NOT NULL
-);
-
---
--- Name: PART_COL_STATS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE TABLE "PART_COL_STATS" (
- "CS_ID" bigint NOT NULL,
- "DB_NAME" character varying(128) DEFAULT NULL::character varying,
- "TABLE_NAME" character varying(128) DEFAULT NULL::character varying,
- "PARTITION_NAME" character varying(767) DEFAULT NULL::character varying,
- "COLUMN_NAME" character varying(128) DEFAULT NULL::character varying,
- "COLUMN_TYPE" character varying(128) DEFAULT NULL::character varying,
- "PART_ID" bigint NOT NULL,
- "LONG_LOW_VALUE" bigint,
- "LONG_HIGH_VALUE" bigint,
- "DOUBLE_LOW_VALUE" double precision,
- "DOUBLE_HIGH_VALUE" double precision,
- "BIG_DECIMAL_LOW_VALUE" character varying(4000) DEFAULT NULL::character varying,
- "BIG_DECIMAL_HIGH_VALUE" character varying(4000) DEFAULT NULL::character varying,
- "NUM_NULLS" bigint NOT NULL,
- "NUM_DISTINCTS" bigint,
- "AVG_COL_LEN" double precision,
- "MAX_COL_LEN" bigint,
- "NUM_TRUES" bigint,
- "NUM_FALSES" bigint,
- "LAST_ANALYZED" bigint NOT NULL
-);
-
---
--- Name: BUCKETING_COLS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "BUCKETING_COLS"
-    ADD CONSTRAINT "BUCKETING_COLS_pkey" PRIMARY KEY ("SD_ID", "INTEGER_IDX");
-
-
---
--- Name: CDS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "CDS"
-    ADD CONSTRAINT "CDS_pkey" PRIMARY KEY ("CD_ID");
-
-
---
--- Name: COLUMNS_V2_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "COLUMNS_V2"
-    ADD CONSTRAINT "COLUMNS_V2_pkey" PRIMARY KEY ("CD_ID", "COLUMN_NAME");
-
-
---
--- Name: COLUMNS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "COLUMNS_OLD"
-    ADD CONSTRAINT "COLUMNS_pkey" PRIMARY KEY ("SD_ID", "COLUMN_NAME");
-
-
---
--- Name: DATABASE_PARAMS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "DATABASE_PARAMS"
-    ADD CONSTRAINT "DATABASE_PARAMS_pkey" PRIMARY KEY ("DB_ID", "PARAM_KEY");
-
-
---
--- Name: DBPRIVILEGEINDEX; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "DB_PRIVS"
-    ADD CONSTRAINT "DBPRIVILEGEINDEX" UNIQUE ("DB_ID", "PRINCIPAL_NAME", "PRINCIPAL_TYPE", "DB_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: DBS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "DBS"
-    ADD CONSTRAINT "DBS_pkey" PRIMARY KEY ("DB_ID");
-
-
---
--- Name: DB_PRIVS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "DB_PRIVS"
-    ADD CONSTRAINT "DB_PRIVS_pkey" PRIMARY KEY ("DB_GRANT_ID");
-
-
---
--- Name: GLOBALPRIVILEGEINDEX; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "GLOBAL_PRIVS"
-    ADD CONSTRAINT "GLOBALPRIVILEGEINDEX" UNIQUE ("PRINCIPAL_NAME", "PRINCIPAL_TYPE", "USER_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: GLOBAL_PRIVS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "GLOBAL_PRIVS"
-    ADD CONSTRAINT "GLOBAL_PRIVS_pkey" PRIMARY KEY ("USER_GRANT_ID");
-
-
---
--- Name: IDXS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "IDXS"
-    ADD CONSTRAINT "IDXS_pkey" PRIMARY KEY ("INDEX_ID");
-
-
---
--- Name: INDEX_PARAMS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "INDEX_PARAMS"
-    ADD CONSTRAINT "INDEX_PARAMS_pkey" PRIMARY KEY ("INDEX_ID", "PARAM_KEY");
-
-
---
--- Name: NUCLEUS_TABLES_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "NUCLEUS_TABLES"
-    ADD CONSTRAINT "NUCLEUS_TABLES_pkey" PRIMARY KEY ("CLASS_NAME");
-
-
---
--- Name: PARTITIONS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PARTITIONS"
-    ADD CONSTRAINT "PARTITIONS_pkey" PRIMARY KEY ("PART_ID");
-
-
---
--- Name: PARTITION_EVENTS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PARTITION_EVENTS"
-    ADD CONSTRAINT "PARTITION_EVENTS_pkey" PRIMARY KEY ("PART_NAME_ID");
-
-
---
--- Name: PARTITION_KEYS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PARTITION_KEYS"
-    ADD CONSTRAINT "PARTITION_KEYS_pkey" PRIMARY KEY ("TBL_ID", "PKEY_NAME");
-
-
---
--- Name: PARTITION_KEY_VALS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PARTITION_KEY_VALS"
-    ADD CONSTRAINT "PARTITION_KEY_VALS_pkey" PRIMARY KEY ("PART_ID", "INTEGER_IDX");
-
-
---
--- Name: PARTITION_PARAMS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PARTITION_PARAMS"
-    ADD CONSTRAINT "PARTITION_PARAMS_pkey" PRIMARY KEY ("PART_ID", "PARAM_KEY");
-
-
---
--- Name: PART_COL_PRIVS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PART_COL_PRIVS"
-    ADD CONSTRAINT "PART_COL_PRIVS_pkey" PRIMARY KEY ("PART_COLUMN_GRANT_ID");
-
-
---
--- Name: PART_PRIVS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PART_PRIVS"
-    ADD CONSTRAINT "PART_PRIVS_pkey" PRIMARY KEY ("PART_GRANT_ID");
-
-
---
--- Name: ROLEENTITYINDEX; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "ROLES"
-    ADD CONSTRAINT "ROLEENTITYINDEX" UNIQUE ("ROLE_NAME");
-
-
---
--- Name: ROLES_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "ROLES"
-    ADD CONSTRAINT "ROLES_pkey" PRIMARY KEY ("ROLE_ID");
-
-
---
--- Name: ROLE_MAP_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "ROLE_MAP"
-    ADD CONSTRAINT "ROLE_MAP_pkey" PRIMARY KEY ("ROLE_GRANT_ID");
-
-
---
--- Name: SDS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "SDS"
-    ADD CONSTRAINT "SDS_pkey" PRIMARY KEY ("SD_ID");
-
-
---
--- Name: SD_PARAMS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "SD_PARAMS"
-    ADD CONSTRAINT "SD_PARAMS_pkey" PRIMARY KEY ("SD_ID", "PARAM_KEY");
-
-
---
--- Name: SEQUENCE_TABLE_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "SEQUENCE_TABLE"
-    ADD CONSTRAINT "SEQUENCE_TABLE_pkey" PRIMARY KEY ("SEQUENCE_NAME");
-
-
---
--- Name: SERDES_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "SERDES"
-    ADD CONSTRAINT "SERDES_pkey" PRIMARY KEY ("SERDE_ID");
-
-
---
--- Name: SERDE_PARAMS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "SERDE_PARAMS"
-    ADD CONSTRAINT "SERDE_PARAMS_pkey" PRIMARY KEY ("SERDE_ID", "PARAM_KEY");
-
-
---
--- Name: SORT_COLS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "SORT_COLS"
-    ADD CONSTRAINT "SORT_COLS_pkey" PRIMARY KEY ("SD_ID", "INTEGER_IDX");
-
-
---
--- Name: TABLE_PARAMS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TABLE_PARAMS"
-    ADD CONSTRAINT "TABLE_PARAMS_pkey" PRIMARY KEY ("TBL_ID", "PARAM_KEY");
-
-
---
--- Name: TBLS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TBLS"
-    ADD CONSTRAINT "TBLS_pkey" PRIMARY KEY ("TBL_ID");
-
-
---
--- Name: TBL_COL_PRIVS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TBL_COL_PRIVS"
-    ADD CONSTRAINT "TBL_COL_PRIVS_pkey" PRIMARY KEY ("TBL_COLUMN_GRANT_ID");
-
-
---
--- Name: TBL_PRIVS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TBL_PRIVS"
-    ADD CONSTRAINT "TBL_PRIVS_pkey" PRIMARY KEY ("TBL_GRANT_ID");
-
-
---
--- Name: TYPES_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TYPES"
-    ADD CONSTRAINT "TYPES_pkey" PRIMARY KEY ("TYPES_ID");
-
-
---
--- Name: TYPE_FIELDS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TYPE_FIELDS"
-    ADD CONSTRAINT "TYPE_FIELDS_pkey" PRIMARY KEY ("TYPE_NAME", "FIELD_NAME");
-
-ALTER TABLE ONLY "SKEWED_STRING_LIST"
-    ADD CONSTRAINT "SKEWED_STRING_LIST_pkey" PRIMARY KEY ("STRING_LIST_ID");
-
-ALTER TABLE ONLY "SKEWED_STRING_LIST_VALUES"
-    ADD CONSTRAINT "SKEWED_STRING_LIST_VALUES_pkey" PRIMARY KEY ("STRING_LIST_ID", "INTEGER_IDX");
-
-
-ALTER TABLE ONLY "SKEWED_COL_NAMES"
-    ADD CONSTRAINT "SKEWED_COL_NAMES_pkey" PRIMARY KEY ("SD_ID", "INTEGER_IDX");
-
-ALTER TABLE ONLY "SKEWED_COL_VALUE_LOC_MAP"
-    ADD CONSTRAINT "SKEWED_COL_VALUE_LOC_MAP_pkey" PRIMARY KEY ("SD_ID", "STRING_LIST_ID_KID");
-
-ALTER TABLE ONLY "SKEWED_VALUES"
-    ADD CONSTRAINT "SKEWED_VALUES_pkey" PRIMARY KEY ("SD_ID_OID", "INTEGER_IDX");
-
---
--- Name: TAB_COL_STATS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-ALTER TABLE ONLY "TAB_COL_STATS" ADD CONSTRAINT "TAB_COL_STATS_pkey" PRIMARY KEY("CS_ID");
-
---
--- Name: PART_COL_STATS_pkey; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-ALTER TABLE ONLY "PART_COL_STATS" ADD CONSTRAINT "PART_COL_STATS_pkey" PRIMARY KEY("CS_ID");
-
---
--- Name: UNIQUEINDEX; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "IDXS"
-    ADD CONSTRAINT "UNIQUEINDEX" UNIQUE ("INDEX_NAME", "ORIG_TBL_ID");
-
-
---
--- Name: UNIQUEPARTITION; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "PARTITIONS"
-    ADD CONSTRAINT "UNIQUEPARTITION" UNIQUE ("PART_NAME", "TBL_ID");
-
-
---
--- Name: UNIQUETABLE; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TBLS"
-    ADD CONSTRAINT "UNIQUETABLE" UNIQUE ("TBL_NAME", "DB_ID");
-
-
---
--- Name: UNIQUE_DATABASE; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "DBS"
-    ADD CONSTRAINT "UNIQUE_DATABASE" UNIQUE ("NAME");
-
-
---
--- Name: UNIQUE_TYPE; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "TYPES"
-    ADD CONSTRAINT "UNIQUE_TYPE" UNIQUE ("TYPE_NAME");
-
-
---
--- Name: USERROLEMAPINDEX; Type: CONSTRAINT; Schema: public; Owner: hiveuser; Tablespace:
---
-
-ALTER TABLE ONLY "ROLE_MAP"
-    ADD CONSTRAINT "USERROLEMAPINDEX" UNIQUE ("PRINCIPAL_NAME", "ROLE_ID", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: BUCKETING_COLS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "BUCKETING_COLS_N49" ON "BUCKETING_COLS" USING btree ("SD_ID");
-
-
---
--- Name: COLUMNS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "COLUMNS_N49" ON "COLUMNS_OLD" USING btree ("SD_ID");
-
-
---
--- Name: DATABASE_PARAMS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "DATABASE_PARAMS_N49" ON "DATABASE_PARAMS" USING btree ("DB_ID");
-
-
---
--- Name: DB_PRIVS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "DB_PRIVS_N49" ON "DB_PRIVS" USING btree ("DB_ID");
-
-
---
--- Name: IDXS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "IDXS_N49" ON "IDXS" USING btree ("ORIG_TBL_ID");
-
-
---
--- Name: IDXS_N50; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "IDXS_N50" ON "IDXS" USING btree ("INDEX_TBL_ID");
-
-
---
--- Name: IDXS_N51; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "IDXS_N51" ON "IDXS" USING btree ("SD_ID");
-
-
---
--- Name: INDEX_PARAMS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "INDEX_PARAMS_N49" ON "INDEX_PARAMS" USING btree ("INDEX_ID");
-
-
---
--- Name: PARTITIONCOLUMNPRIVILEGEINDEX; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITIONCOLUMNPRIVILEGEINDEX" ON "PART_COL_PRIVS" USING btree ("PART_ID", "COLUMN_NAME", "PRINCIPAL_NAME", "PRINCIPAL_TYPE", "PART_COL_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: PARTITIONEVENTINDEX; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITIONEVENTINDEX" ON "PARTITION_EVENTS" USING btree ("PARTITION_NAME");
-
-
---
--- Name: PARTITIONS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITIONS_N49" ON "PARTITIONS" USING btree ("TBL_ID");
-
-
---
--- Name: PARTITIONS_N50; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITIONS_N50" ON "PARTITIONS" USING btree ("SD_ID");
-
-
---
--- Name: PARTITION_KEYS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITION_KEYS_N49" ON "PARTITION_KEYS" USING btree ("TBL_ID");
-
-
---
--- Name: PARTITION_KEY_VALS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITION_KEY_VALS_N49" ON "PARTITION_KEY_VALS" USING btree ("PART_ID");
-
-
---
--- Name: PARTITION_PARAMS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTITION_PARAMS_N49" ON "PARTITION_PARAMS" USING btree ("PART_ID");
-
-
---
--- Name: PARTPRIVILEGEINDEX; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PARTPRIVILEGEINDEX" ON "PART_PRIVS" USING btree ("PART_ID", "PRINCIPAL_NAME", "PRINCIPAL_TYPE", "PART_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: PART_COL_PRIVS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PART_COL_PRIVS_N49" ON "PART_COL_PRIVS" USING btree ("PART_ID");
-
-
---
--- Name: PART_PRIVS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PART_PRIVS_N49" ON "PART_PRIVS" USING btree ("PART_ID");
-
-
---
--- Name: ROLE_MAP_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "ROLE_MAP_N49" ON "ROLE_MAP" USING btree ("ROLE_ID");
-
-
---
--- Name: SDS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "SDS_N49" ON "SDS" USING btree ("SERDE_ID");
-
-
---
--- Name: SD_PARAMS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "SD_PARAMS_N49" ON "SD_PARAMS" USING btree ("SD_ID");
-
-
---
--- Name: SERDE_PARAMS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "SERDE_PARAMS_N49" ON "SERDE_PARAMS" USING btree ("SERDE_ID");
-
-
---
--- Name: SORT_COLS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "SORT_COLS_N49" ON "SORT_COLS" USING btree ("SD_ID");
-
-
---
--- Name: TABLECOLUMNPRIVILEGEINDEX; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TABLECOLUMNPRIVILEGEINDEX" ON "TBL_COL_PRIVS" USING btree ("TBL_ID", "COLUMN_NAME", "PRINCIPAL_NAME", "PRINCIPAL_TYPE", "TBL_COL_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: TABLEPRIVILEGEINDEX; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TABLEPRIVILEGEINDEX" ON "TBL_PRIVS" USING btree ("TBL_ID", "PRINCIPAL_NAME", "PRINCIPAL_TYPE", "TBL_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
---
--- Name: TABLE_PARAMS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TABLE_PARAMS_N49" ON "TABLE_PARAMS" USING btree ("TBL_ID");
-
-
---
--- Name: TBLS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TBLS_N49" ON "TBLS" USING btree ("DB_ID");
-
-
---
--- Name: TBLS_N50; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TBLS_N50" ON "TBLS" USING btree ("SD_ID");
-
-
---
--- Name: TBL_COL_PRIVS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TBL_COL_PRIVS_N49" ON "TBL_COL_PRIVS" USING btree ("TBL_ID");
-
-
---
--- Name: TBL_PRIVS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TBL_PRIVS_N49" ON "TBL_PRIVS" USING btree ("TBL_ID");
-
-
---
--- Name: TYPE_FIELDS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TYPE_FIELDS_N49" ON "TYPE_FIELDS" USING btree ("TYPE_NAME");
-
---
--- Name: TAB_COL_STATS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "TAB_COL_STATS_N49" ON "TAB_COL_STATS" USING btree ("TBL_ID");
-
---
--- Name: PART_COL_STATS_N49; Type: INDEX; Schema: public; Owner: hiveuser; Tablespace:
---
-
-CREATE INDEX "PART_COL_STATS_N49" ON "PART_COL_STATS" USING btree ("PART_ID");
-
-
-ALTER TABLE ONLY "SKEWED_STRING_LIST_VALUES"
-    ADD CONSTRAINT "SKEWED_STRING_LIST_VALUES_fkey" FOREIGN KEY ("STRING_LIST_ID") REFERENCES "SKEWED_STRING_LIST"("STRING_LIST_ID") DEFERRABLE;
-
-
-ALTER TABLE ONLY "SKEWED_COL_NAMES"
-    ADD CONSTRAINT "SKEWED_COL_NAMES_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
-ALTER TABLE ONLY "SKEWED_COL_VALUE_LOC_MAP"
-    ADD CONSTRAINT "SKEWED_COL_VALUE_LOC_MAP_fkey1" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-ALTER TABLE ONLY "SKEWED_COL_VALUE_LOC_MAP"
-    ADD CONSTRAINT "SKEWED_COL_VALUE_LOC_MAP_fkey2" FOREIGN KEY ("STRING_LIST_ID_KID") REFERENCES "SKEWED_STRING_LIST"("STRING_LIST_ID") DEFERRABLE;
-
-ALTER TABLE ONLY "SKEWED_VALUES"
-    ADD CONSTRAINT "SKEWED_VALUES_fkey1" FOREIGN KEY ("STRING_LIST_ID_EID") REFERENCES "SKEWED_STRING_LIST"("STRING_LIST_ID") DEFERRABLE;
-
-ALTER TABLE ONLY "SKEWED_VALUES"
-    ADD CONSTRAINT "SKEWED_VALUES_fkey2" FOREIGN KEY ("SD_ID_OID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: BUCKETING_COLS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "BUCKETING_COLS"
-    ADD CONSTRAINT "BUCKETING_COLS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: COLUMNS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "COLUMNS_OLD"
-    ADD CONSTRAINT "COLUMNS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: COLUMNS_V2_CD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "COLUMNS_V2"
-    ADD CONSTRAINT "COLUMNS_V2_CD_ID_fkey" FOREIGN KEY ("CD_ID") REFERENCES "CDS"("CD_ID") DEFERRABLE;
-
-
---
--- Name: DATABASE_PARAMS_DB_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "DATABASE_PARAMS"
-    ADD CONSTRAINT "DATABASE_PARAMS_DB_ID_fkey" FOREIGN KEY ("DB_ID") REFERENCES "DBS"("DB_ID") DEFERRABLE;
-
-
---
--- Name: DB_PRIVS_DB_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "DB_PRIVS"
-    ADD CONSTRAINT "DB_PRIVS_DB_ID_fkey" FOREIGN KEY ("DB_ID") REFERENCES "DBS"("DB_ID") DEFERRABLE;
-
-
---
--- Name: IDXS_INDEX_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "IDXS"
-    ADD CONSTRAINT "IDXS_INDEX_TBL_ID_fkey" FOREIGN KEY ("INDEX_TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: IDXS_ORIG_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "IDXS"
-    ADD CONSTRAINT "IDXS_ORIG_TBL_ID_fkey" FOREIGN KEY ("ORIG_TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: IDXS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "IDXS"
-    ADD CONSTRAINT "IDXS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: INDEX_PARAMS_INDEX_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "INDEX_PARAMS"
-    ADD CONSTRAINT "INDEX_PARAMS_INDEX_ID_fkey" FOREIGN KEY ("INDEX_ID") REFERENCES "IDXS"("INDEX_ID") DEFERRABLE;
-
-
---
--- Name: PARTITIONS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PARTITIONS"
-    ADD CONSTRAINT "PARTITIONS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: PARTITIONS_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PARTITIONS"
-    ADD CONSTRAINT "PARTITIONS_TBL_ID_fkey" FOREIGN KEY ("TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: PARTITION_KEYS_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PARTITION_KEYS"
-    ADD CONSTRAINT "PARTITION_KEYS_TBL_ID_fkey" FOREIGN KEY ("TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: PARTITION_KEY_VALS_PART_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PARTITION_KEY_VALS"
-    ADD CONSTRAINT "PARTITION_KEY_VALS_PART_ID_fkey" FOREIGN KEY ("PART_ID") REFERENCES "PARTITIONS"("PART_ID") DEFERRABLE;
-
-
---
--- Name: PARTITION_PARAMS_PART_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PARTITION_PARAMS"
-    ADD CONSTRAINT "PARTITION_PARAMS_PART_ID_fkey" FOREIGN KEY ("PART_ID") REFERENCES "PARTITIONS"("PART_ID") DEFERRABLE;
-
-
---
--- Name: PART_COL_PRIVS_PART_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PART_COL_PRIVS"
-    ADD CONSTRAINT "PART_COL_PRIVS_PART_ID_fkey" FOREIGN KEY ("PART_ID") REFERENCES "PARTITIONS"("PART_ID") DEFERRABLE;
-
-
---
--- Name: PART_PRIVS_PART_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "PART_PRIVS"
-    ADD CONSTRAINT "PART_PRIVS_PART_ID_fkey" FOREIGN KEY ("PART_ID") REFERENCES "PARTITIONS"("PART_ID") DEFERRABLE;
-
-
---
--- Name: ROLE_MAP_ROLE_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "ROLE_MAP"
-    ADD CONSTRAINT "ROLE_MAP_ROLE_ID_fkey" FOREIGN KEY ("ROLE_ID") REFERENCES "ROLES"("ROLE_ID") DEFERRABLE;
-
-
---
--- Name: SDS_CD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "SDS"
-    ADD CONSTRAINT "SDS_CD_ID_fkey" FOREIGN KEY ("CD_ID") REFERENCES "CDS"("CD_ID") DEFERRABLE;
-
-
---
--- Name: SDS_SERDE_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "SDS"
-    ADD CONSTRAINT "SDS_SERDE_ID_fkey" FOREIGN KEY ("SERDE_ID") REFERENCES "SERDES"("SERDE_ID") DEFERRABLE;
-
-
---
--- Name: SD_PARAMS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "SD_PARAMS"
-    ADD CONSTRAINT "SD_PARAMS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: SERDE_PARAMS_SERDE_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "SERDE_PARAMS"
-    ADD CONSTRAINT "SERDE_PARAMS_SERDE_ID_fkey" FOREIGN KEY ("SERDE_ID") REFERENCES "SERDES"("SERDE_ID") DEFERRABLE;
-
-
---
--- Name: SORT_COLS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "SORT_COLS"
-    ADD CONSTRAINT "SORT_COLS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: TABLE_PARAMS_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "TABLE_PARAMS"
-    ADD CONSTRAINT "TABLE_PARAMS_TBL_ID_fkey" FOREIGN KEY ("TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: TBLS_DB_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "TBLS"
-    ADD CONSTRAINT "TBLS_DB_ID_fkey" FOREIGN KEY ("DB_ID") REFERENCES "DBS"("DB_ID") DEFERRABLE;
-
-
---
--- Name: TBLS_SD_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "TBLS"
-    ADD CONSTRAINT "TBLS_SD_ID_fkey" FOREIGN KEY ("SD_ID") REFERENCES "SDS"("SD_ID") DEFERRABLE;
-
-
---
--- Name: TBL_COL_PRIVS_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "TBL_COL_PRIVS"
-    ADD CONSTRAINT "TBL_COL_PRIVS_TBL_ID_fkey" FOREIGN KEY ("TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: TBL_PRIVS_TBL_ID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "TBL_PRIVS"
-    ADD CONSTRAINT "TBL_PRIVS_TBL_ID_fkey" FOREIGN KEY ("TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: TYPE_FIELDS_TYPE_NAME_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-
-ALTER TABLE ONLY "TYPE_FIELDS"
-    ADD CONSTRAINT "TYPE_FIELDS_TYPE_NAME_fkey" FOREIGN KEY ("TYPE_NAME") REFERENCES "TYPES"("TYPES_ID") DEFERRABLE;
-
---
--- Name: TAB_COL_STATS_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-ALTER TABLE ONLY "TAB_COL_STATS" ADD CONSTRAINT "TAB_COL_STATS_fkey" FOREIGN KEY("TBL_ID") REFERENCES "TBLS"("TBL_ID") DEFERRABLE;
-
-
---
--- Name: PART_COL_STATS_fkey; Type: FK CONSTRAINT; Schema: public; Owner: hiveuser
---
-ALTER TABLE ONLY "PART_COL_STATS" ADD CONSTRAINT "PART_COL_STATS_fkey" FOREIGN KEY("PART_ID") REFERENCES "PARTITIONS"("PART_ID") DEFERRABLE;
-
-
-ALTER TABLE ONLY "VERSION" ADD CONSTRAINT "VERSION_pkey" PRIMARY KEY ("VER_ID");
-
---
--- Name: public; Type: ACL; Schema: -; Owner: hiveuser
---
-
-REVOKE ALL ON SCHEMA public FROM PUBLIC;
-GRANT ALL ON SCHEMA public TO PUBLIC;
-
-
-INSERT INTO "VERSION" ("VER_ID", "SCHEMA_VERSION", "VERSION_COMMENT") VALUES (1, '0.12.0', 'Hive release version 0.12.0');
---
--- PostgreSQL database dump complete
---
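
Note that this PostgreSQL dump double-quotes every identifier, so table and column names remain case-sensitive; ad-hoc queries against the metastore must quote them the same way or Postgres will fold the names to lower case and fail to find them. A small psycopg2 sketch of a correctly quoted query — the connection parameters are placeholders, not values from this commit:

    import psycopg2

    # Placeholder DSN; the quoting in the SELECT is the point here.
    conn = psycopg2.connect("dbname=hive user=hiveuser password=secret")
    cur = conn.cursor()
    cur.execute('SELECT "SCHEMA_VERSION" FROM "VERSION" WHERE "VER_ID" = 1')
    print(cur.fetchone())
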

+ 4 - 0
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/metainfo.xml

@@ -23,6 +23,10 @@
       <extends>common-services/HIVE/0.12.0.2.0</extends>
       <version>0.13.0.2.1.1.0</version>
       <components>
+        <component>
+          <name>MYSQL_SERVER</name>
+          <deleted>true</deleted>
+        </component>
         <component>
           <name>HIVE_CLIENT</name>
           <configFiles>

+ 0 - 40
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hcat_client.py

@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-class HCatClient(Script):
-  def install(self, env):
-    import params
-    if params.hcat_home is None:
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-
-if __name__ == "__main__":
-  HCatClient().execute()
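
Worth noting in the deleted client above: on Windows, `install()` only falls back to `self.install_packages(env)` when `params.hcat_home` is unset, presumably because a set value means the HDP MSI already laid the bits down. A hedged sketch of how such a parameter is plausibly resolved — the real lookup lives in params_windows.py, and the exact environment key here is an assumption:

    import os

    # Hypothetical resolution: None when the installer never exported the
    # variable, which is what triggers install_packages() in install() above.
    hcat_home = os.environ.get("HCAT_HOME")
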

+ 0 - 25
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hcat_service_check.py

@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.libraries.functions import get_unique_id_and_date
-
-def hcat_service_check():
-    import params

+ 0 - 61
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive.py

@@ -1,61 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from resource_management.libraries import functions
-
-def hive(name=None):
-  import params
-  XmlConfig("hive-site.xml",
-            conf_dir = params.hive_conf_dir,
-            configurations = params.config['configurations']['hive-site'],
-            owner=params.hive_user,
-            configuration_attributes=params.config['configuration_attributes']['hive-site']
-  )
-  if name in ["hiveserver2","metastore"]:
-    Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"), logoutput=True, user=params.hadoop_user)
-
-  if name == 'metastore':
-    if params.init_metastore_schema:
-      check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
-                                        '-dbType {hive_metastore_db_type} '
-                                        '-userName {hive_metastore_user_name} '
-                                        '-passWord {hive_metastore_user_passwd!p}'
-                                        '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"', #cmd "feature", propagate the process exit code manually
-                                        hive_bin=params.hive_bin,
-                                        hive_metastore_db_type=params.hive_metastore_db_type,
-                                        hive_metastore_user_name=params.hive_metastore_user_name,
-                                        hive_metastore_user_passwd=params.hive_metastore_user_passwd)
-      try:
-        Execute(check_schema_created_cmd)
-      except Fail:
-        create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
-                                   '-dbType {hive_metastore_db_type} '
-                                   '-userName {hive_metastore_user_name} '
-                                   '-passWord {hive_metastore_user_passwd!p}',
-                                   hive_bin=params.hive_bin,
-                                   hive_metastore_db_type=params.hive_metastore_db_type,
-                                   hive_metastore_user_name=params.hive_metastore_user_name,
-                                   hive_metastore_user_passwd=params.hive_metastore_user_passwd)
-        Execute(create_schema_cmd,
-                user = params.hive_user,
-                logoutput=True
-      )

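Two details in the deleted `hive()` function are worth carrying into the merged script: the `!p` conversion in `format()` marks the password so resource_management can mask it in logged command lines (as I understand the library's format helper), and schema setup is the usual check-then-init idiom, relying on `schematool -info` exiting non-zero while no metastore schema exists. A standalone restatement of that idiom, with illustrative arguments:

```python
# Check-then-init schematool idiom from the deleted hive() above,
# restated without resource_management; all argument values are illustrative.
import subprocess

def ensure_metastore_schema(hive_bin, db_type, user, password):
  base = [hive_bin + "\\hive.cmd", "--service", "schematool",
          "-dbType", db_type, "-userName", user, "-passWord", password]
  # 'schematool -info' fails while the schema is missing ...
  if subprocess.call(base + ["-info"]) != 0:
    # ... so -initSchema runs exactly once, on first metastore start.
    subprocess.check_call(base + ["-initSchema"])
```
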
+ 0 - 41
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive_client.py

@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import sys
-from resource_management import *
-from hive import hive
-import service_mapping
-
-class HiveClient(Script):
-  def install(self, env):
-    import params
-    if params.hive_home is None:
-      self.install_packages(env)
-      self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-  def status(self, env):
-    import params
-    check_windows_service_status(service_mapping.hive_client_win_service_name)
-
-if __name__ == "__main__":
-  HiveClient().execute()

+ 0 - 53
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive_metastore.py

@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from hive import hive
-import service_mapping
-
-class HiveMetastore(Script):
-
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.hive_metastore_win_service_name):
-      self.install_packages(env)
-      self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hive(name='metastore')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    Service(service_mapping.hive_metastore_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    Service(service_mapping.hive_metastore_win_service_name, action="stop")
-
-  def status(self, env):
-    import params
-    check_windows_service_status(service_mapping.hive_metastore_win_service_name)
-
-if __name__ == "__main__":
-  HiveMetastore().execute()

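The metastore script above and the HiveServer2/WebHCat scripts below all follow the same Windows lifecycle contract; a condensed sketch of the shared shape, where the service name is a placeholder and `check_windows_service_exists`/`check_windows_service_status` come from resource_management's star import, as in the originals:

```python
# Shared lifecycle shape of the deleted Windows server scripts.
# 'win_service_name' is a placeholder for a service_mapping constant.
from resource_management import *

class WindowsComponent(Script):
  win_service_name = "placeholder"

  def install(self, env):
    if not check_windows_service_exists(self.win_service_name):
      self.install_packages(env)
      self.configure(env)

  def start(self, env):
    import params
    env.set_params(params)
    self.configure(env)  # re-render configs so security changes take effect
    Service(self.win_service_name, action="start")

  def stop(self, env):
    Service(self.win_service_name, action="stop")

  def status(self, env):
    check_windows_service_status(self.win_service_name)
```
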
+ 0 - 52
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/hive_server.py

@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hive import hive
-import service_mapping
-
-class HiveServer(Script):
-
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.hive_server_win_service_name):
-      self.install_packages(env)
-      self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hive(name='hiveserver2')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    Service(service_mapping.hive_server_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    Service(service_mapping.hive_server_win_service_name, action="stop")
-
-  def status(self, env):
-    import params
-    check_windows_service_status(service_mapping.hive_server_win_service_name)
-
-if __name__ == "__main__":
-  HiveServer().execute()

+ 0 - 46
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/mysql_server.py

@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-class MysqlServer(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-  def status(self, env):
-    import status_params
-
-if __name__ == "__main__":
-  MysqlServer().execute()

+ 0 - 39
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/service_check.py

@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import socket
-import sys
-
-from hcat_service_check import hcat_service_check
-from webhcat_service_check import webhcat_service_check
-
-class HiveServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
-    service = "HIVE"
-    Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hive_user, logoutput=True)
-
-    webhcat_service_check()
-
-if __name__ == "__main__":
-  HiveServiceCheck().execute()

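Both this check and the WebHCat check at the end of the diff delegate to the stack-wide Run-SmokeTests.cmd wrapper, varying only the service label and the executing user. A hedged sketch of the shared invocation; note that resource_management's `format()` resolves names from the caller's scope, which is why the one-liner above works:

```python
# Shared smoke-test invocation used by the HIVE and WEBHCAT checks.
# hdp_root, service, and user stand in for values resolved in params.
import os
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions.format import format

def run_smoke_test(hdp_root, service, user):
  smoke_cmd = os.path.join(hdp_root, "Run-SmokeTests.cmd")
  Execute(format("cmd /C {smoke_cmd} {service}"), user=user, logoutput=True)
```
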
+ 0 - 23
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/service_mapping.py

@@ -1,23 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-hive_metastore_win_service_name = "metastore"
-hive_client_win_service_name = "hwi"
-hive_server_win_service_name = "hiveserver2"
-webhcat_server_win_service_name = "templeton"

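This module was the single source of truth for the Windows service names, and it disappears with the merge. (Mapping the client component to "hwi", the Hive Web Interface service, is arguably an odd fit for a client.) For reference, sibling scripts consumed it by importing the module and handing a constant to the Windows service helpers, roughly:

```python
# How the constants above were consumed by the sibling scripts
# (see hive_server.py earlier in this diff); the helper is illustrative.
import service_mapping
from resource_management import *  # provides check_windows_service_status

WIN_SERVICES = {
  "hive_metastore": service_mapping.hive_metastore_win_service_name,
  "hive_server": service_mapping.hive_server_win_service_name,
  "webhcat_server": service_mapping.webhcat_server_win_service_name,
}

def component_status(component):
  # Raises if the mapped Windows service is not running.
  check_windows_service_status(WIN_SERVICES[component])
```
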
+ 0 - 30
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/webhcat.py

@@ -1,30 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-import sys
-
-
-def webhcat():
-  import params
-  XmlConfig("webhcat-site.xml",
-            conf_dir=params.hcat_config_dir,
-            configurations=params.config['configurations']['webhcat-site']
-  )

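`XmlConfig` materializes the desired-state dictionary as a Hadoop-style name/value property file. A rough stand-in for what it writes out for webhcat-site.xml, using one hypothetical property:

```python
# Illustrative stand-in for the file XmlConfig renders above.
# The sample property is hypothetical, not taken from this diff.
from xml.etree import ElementTree as ET

def write_hadoop_xml(path, properties):
  root = ET.Element("configuration")
  for name, value in sorted(properties.items()):
    prop = ET.SubElement(root, "property")
    ET.SubElement(prop, "name").text = name
    ET.SubElement(prop, "value").text = str(value)
  ET.ElementTree(root).write(path, encoding="utf-8", xml_declaration=True)

write_hadoop_xml("webhcat-site.xml", {"templeton.port": "50111"})
```
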
+ 0 - 48
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/webhcat_server.py

@@ -1,48 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import sys
-from resource_management import *
-from webhcat import webhcat
-import service_mapping
-
-class WebHCatServer(Script):
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.webhcat_server_win_service_name):
-      self.install_packages(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-  def start(self, env):
-    import params
-    self.configure(env) # FOR SECURITY
-    Service(service_mapping.webhcat_server_win_service_name, action="start")
-
-  def stop(self, env):
-    Service(service_mapping.webhcat_server_win_service_name, action="stop")
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.webhcat_server_win_service_name)
-
-if __name__ == "__main__":
-  WebHCatServer().execute()

+ 0 - 27
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/package/scripts/webhcat_service_check.py

@@ -1,27 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-def webhcat_service_check():
-  import params
-  smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
-  service = "WEBHCAT"
-  Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)