#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management import *
import status_params
import os

# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
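# compare_versions() compares dotted version strings segment by segment and
# returns a negative, zero, or positive value (e.g. compare_versions('2.0.6', '2.1') < 0),
# so stack_is_hdp21 is True only for stacks in the [2.1, 2.2) range.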

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)

# Hadoop params
# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # start out with client libraries
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
  hadoop_home = '/usr/hdp/current/hadoop-client'
  hive_bin = '/usr/hdp/current/hive-client/bin'
  hive_lib = '/usr/hdp/current/hive-client/lib'

  # if this is a server action, then use the server binaries; smoke tests
  # use the client binaries
  command_role = default("/role", "")
  server_role_dir_mapping = {'HIVE_SERVER': 'hive-server2',
                             'HIVE_METASTORE': 'hive-metastore'}
  if command_role in server_role_dir_mapping:
    hive_server_root = server_role_dir_mapping[command_role]
    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
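  # e.g. for a HIVE_SERVER command this resolves hive_bin to
  # /usr/hdp/current/hive-server2/bin; the /usr/hdp/current/* entries are
  # symlinks managed by hdp-select that point at the active stack version.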

  # there are no client versions of these, use server versions directly
  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'

  hive_specific_configs_supported = True
else:
  hadoop_bin_dir = "/usr/bin"
  hadoop_home = '/usr'
  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
  hive_bin = '/usr/lib/hive/bin'
  hive_lib = '/usr/lib/hive/lib/'
  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'

  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
  # for newer versions
  else:
    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'

  hive_specific_configs_supported = False

hadoop_conf_dir = "/etc/hadoop/conf"
hive_conf_dir_prefix = "/etc/hive"
hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
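# Note: resource_management's format() resolves {name} placeholders against the
# caller's local/global variables, so the line above yields "/etc/hive/conf.server".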

if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  hcat_conf_dir = '/etc/hcatalog/conf'
  config_dir = '/etc/hcatalog/conf'
# for newer versions
else:
  hcat_conf_dir = '/etc/hive-hcatalog/conf'
  config_dir = '/etc/hive-webhcat/conf'

execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']

#users
hive_user = config['configurations']['hive-env']['hive_user']

#JDBC driver jar name
hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
if hive_jdbc_driver == "com.mysql.jdbc.Driver":
  jdbc_jar_name = "mysql-connector-java.jar"
  jdbc_symlink_name = "mysql-jdbc-driver.jar"
elif hive_jdbc_driver == "org.postgresql.Driver":
  jdbc_jar_name = "postgresql-jdbc.jar"
  jdbc_symlink_name = "postgres-jdbc-driver.jar"
elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
  jdbc_jar_name = "ojdbc.jar"
  jdbc_symlink_name = "oracle-jdbc-driver.jar"

check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
hive_jdbc_drivers_list = ["com.mysql.jdbc.Driver", "org.postgresql.Driver", "oracle.jdbc.driver.OracleDriver"]
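# Note: jdbc_jar_name/jdbc_symlink_name are only assigned for the three drivers
# above; any other ConnectionDriverName leaves them undefined, and the later
# format() calls that reference them would fail at module load.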

#common
hive_metastore_host = config['clusterHostInfo']['hive_metastore_host'][0]
hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris'])  # e.g. "9083" from thrift://<host>:9083
hive_var_lib = '/var/lib/hive'
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port', "10000")
hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
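# e.g. "jdbc:hive2://hive-host.example.com:10000" (hostname here is illustrative)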
hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
smokeuser = config['configurations']['cluster-env']['smokeuser']
smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
fs_root = config['configurations']['core-site']['fs.defaultFS']
security_enabled = config['configurations']['cluster-env']['security_enabled']
kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
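# get_kinit_path() searches the given directories for the kinit binary and
# returns the first match, so this works across Kerberos packaging layouts.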
hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']

#hive_env
hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
hive_pid_dir = status_params.hive_pid_dir
hive_pid = status_params.hive_pid

#Default conf dir for client
hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]

if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
  hive_config_dir = hive_server_conf_dir
else:
  hive_config_dir = hive_client_conf_dir

#hive-site
hive_database_name = config['configurations']['hive-env']['hive_database_name']
hive_database = config['configurations']['hive-env']['hive_database']

#Starting hiveserver2
start_hiveserver2_script = 'startHiveserver2.sh.j2'

##Starting metastore
start_metastore_script = 'startMetastore.sh'
hive_metastore_pid = status_params.hive_metastore_pid

java_share_dir = '/usr/share/java'
driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")

hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
user_group = config['configurations']['cluster-env']['user_group']
artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")

target = format("{hive_lib}/{jdbc_jar_name}")

jdk_location = config['hostLevelParams']['jdk_location']
driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")

start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
start_metastore_path = format("{tmp_dir}/start_metastore_script")

hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
java64_home = config['hostLevelParams']['java_home']

##### MYSQL
db_name = config['configurations']['hive-env']['hive_database_name']
mysql_group = 'mysql'
mysql_host = config['clusterHostInfo']['hive_mysql_host']
mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")

######## Metastore Schema
if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  init_metastore_schema = False
else:
  init_metastore_schema = True
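# Hive on HDP 2.1+ ships schematool, which Ambari uses to initialize the
# metastore schema explicitly; older stacks create the schema implicitly,
# so initialization is skipped there.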

########## HCAT
hcat_dbroot = hcat_lib
hcat_user = config['configurations']['hive-env']['hcat_user']
webhcat_user = config['configurations']['hive-env']['webhcat_user']

hcat_pid_dir = status_params.hcat_pid_dir
hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']

hcat_env_sh_template = config['configurations']['hcat-env']['content']

#hive-log4j.properties.template
if 'hive-log4j' in config['configurations'] and 'content' in config['configurations']['hive-log4j']:
  log4j_props = config['configurations']['hive-log4j']['content']
else:
  log4j_props = None

#hive-exec-log4j.properties.template
if 'hive-exec-log4j' in config['configurations'] and 'content' in config['configurations']['hive-exec-log4j']:
  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
else:
  log4j_exec_props = None

daemon_name = status_params.daemon_name
hive_env_sh_template = config['configurations']['hive-env']['content']

hive_hdfs_user_dir = format("/user/{hive_user}")
hive_hdfs_user_mode = 0700
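# 0700 is a Python 2 octal literal (Ambari agent scripts run on Python 2);
# the Python 3 spelling would be 0o700.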
hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]

#for create_hdfs_directory
hostname = config["hostname"]
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

# Tez-related properties
tez_user = config['configurations']['tez-env']['tez_user']

# Tez jars
tez_local_api_jars = '/usr/lib/tez/tez*.jar'
tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
app_dir_files = {tez_local_api_jars: None}

# Tez libraries
tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)

if System.get_instance().os_family == "ubuntu":
  mysql_configname = '/etc/mysql/my.cnf'
else:
  mysql_configname = '/etc/my.cnf'

mysql_user = 'mysql'

# Hive security
hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']

mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
hive_use_existing_db = hive_database.startswith('Existing')

# Other packages (like libmysql-java) also provide /usr/share/java/mysql-connector-java.jar;
# installing mysql-connector-java on top of them can cause package conflicts.
if hive_use_existing_db:
  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
else:
  if 'role' in config and config['role'] != "MYSQL_SERVER":
    hive_exclude_packages = ['mysql', 'mysql-server']
  else:
    hive_exclude_packages = []

########################################################
########### WebHCat related params #####################
########################################################

webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
templeton_pid_dir = status_params.hcat_pid_dir

webhcat_pid_file = status_params.webhcat_pid_file

templeton_jar = config['configurations']['webhcat-site']['templeton.jar']

webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']

webhcat_apps_dir = "/apps/webhcat"

hcat_hdfs_user_dir = format("/user/{hcat_user}")
hcat_hdfs_user_mode = 0755
webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
webhcat_hdfs_user_mode = 0755

#for create_hdfs_directory
security_param = "true" if security_enabled else "false"

import functools
#create partial functions with common arguments for every HdfsDirectory call
#to create hdfs directory we need to call params.HdfsDirectory in code
HdfsDirectory = functools.partial(
  HdfsDirectory,
  conf_dir=hadoop_conf_dir,
  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
  security_enabled=security_enabled,
  keytab=hdfs_user_keytab,
  kinit_path_local=kinit_path_local,
  bin_dir=hadoop_bin_dir
)
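# Illustrative call site (mirroring how other scripts in this service use the
# pre-bound resource; argument values here are taken from the params above):
#   params.HdfsDirectory(params.hive_hdfs_user_dir,
#                        action="create_delayed",
#                        owner=params.hive_user,
#                        mode=params.hive_hdfs_user_mode)
#   params.HdfsDirectory(None, action="create")  # materialize all delayed dirs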