params.py
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management.libraries.functions.default import default
from resource_management import *
import status_params
import os

# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

stack_name = default("/hostLevelParams/stack_name", None)

# node hostname
hostname = config["hostname"]

# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)

# Hadoop params
# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # start out with client libraries
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
  hadoop_home = '/usr/hdp/current/hadoop-client'
  hive_bin = '/usr/hdp/current/hive-client/bin'
  hive_lib = '/usr/hdp/current/hive-client/lib'

  # if this is a server action, then use the server binaries; smoke tests
  # use the client binaries
  command_role = default("/role", "")
  server_role_dir_mapping = { 'HIVE_SERVER' : 'hive-server2',
                              'HIVE_METASTORE' : 'hive-metastore' }

  if command_role in server_role_dir_mapping:
    hive_server_root = server_role_dir_mapping[command_role]
    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
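    # Note: format() here is resource_management's format(), which interpolates
    # {hive_server_root} from the enclosing scope. Illustrative result: for a
    # HIVE_SERVER command, hive_bin resolves to '/usr/hdp/current/hive-server2/bin'.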
  # there are no client versions of these, use server versions directly
  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'

  hive_specific_configs_supported = True
else:
  hadoop_bin_dir = "/usr/bin"
  hadoop_home = '/usr'
  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
  hive_bin = '/usr/lib/hive/bin'
  hive_lib = '/usr/lib/hive/lib/'
  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'

  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
  # for newer versions
  else:
    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'

  hive_specific_configs_supported = False
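# Illustrative outcome of the branch above (derived from the defaults it sets):
#   HDP 2.2+ : hive_bin=/usr/hdp/current/hive-client/bin (hive-server2 / hive-metastore
#              dirs for server commands), hcat_lib=/usr/hdp/current/hive-webhcat/share/hcatalog
#   pre-2.2  : hive_bin=/usr/lib/hive/bin, hcat_lib=/usr/lib/hive-hcatalog/share/hcatalog
#              (or /usr/lib/hcatalog/share/hcatalog on stacks older than 2.1.0.0)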
hadoop_conf_dir = "/etc/hadoop/conf"
hive_conf_dir_prefix = "/etc/hive"
hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")

if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  hcat_conf_dir = '/etc/hcatalog/conf'
  config_dir = '/etc/hcatalog/conf'
# for newer versions
else:
  hcat_conf_dir = '/etc/hive-hcatalog/conf'
  config_dir = '/etc/hive-webhcat/conf'

execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']

#HACK Temporarily use dbType=azuredb while invoking schematool
if hive_metastore_db_type == "mssql":
  hive_metastore_db_type = "azuredb"
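# Illustrative only: the value above feeds the schematool invocation built elsewhere
# in the service scripts, roughly of the form (assumed, not the literal command):
#   schematool -dbType azuredb -initSchema -userName <ConnectionUserName> -passWord <ConnectionPassword>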
#users
hive_user = config['configurations']['hive-env']['hive_user']

#JDBC driver jar name
hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
  jdbc_jar_name = "sqljdbc4.jar"
  jdbc_symlink_name = "mssql-jdbc-driver.jar"
elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
  jdbc_jar_name = "mysql-connector-java.jar"
  jdbc_symlink_name = "mysql-jdbc-driver.jar"
elif hive_jdbc_driver == "org.postgresql.Driver":
  jdbc_jar_name = "postgresql-jdbc.jar"
  jdbc_symlink_name = "postgres-jdbc-driver.jar"
elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
  jdbc_jar_name = "ojdbc.jar"
  jdbc_symlink_name = "oracle-jdbc-driver.jar"
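# Note: there is no trailing else, so an unrecognized ConnectionDriverName leaves
# jdbc_jar_name / jdbc_symlink_name undefined and the connector paths built below
# cannot be formatted; the supported drivers are the four listed in
# hive_jdbc_drivers_list just after this block.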
check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver", "com.mysql.jdbc.Driver", "org.postgresql.Driver", "oracle.jdbc.driver.OracleDriver"]
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")

#common
hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
hive_metastore_host = hive_metastore_hosts[0]
hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris'])  #"9083"
hive_var_lib = '/var/lib/hive'
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
hive_server_hosts = config['clusterHostInfo']['hive_server_host']
hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']

if hive_transport_mode.lower() == "http":
  hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
else:
  hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port', "10000")

hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
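# Illustrative result: with binary transport and the default Thrift port this yields
# something like jdbc:hive2://<hive_server_host>:10000; with http transport the
# configured hive.server2.thrift.http.port is used instead.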
hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']

smokeuser = config['configurations']['cluster-env']['smokeuser']
smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']

fs_root = config['configurations']['core-site']['fs.defaultFS']
security_enabled = config['configurations']['cluster-env']['security_enabled']

kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']

#hive_env
hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
hive_pid_dir = status_params.hive_pid_dir
hive_pid = status_params.hive_pid

#Default conf dir for client
hive_conf_dirs_list = [hive_client_conf_dir]

if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
  hive_conf_dirs_list.append(hive_server_conf_dir)

if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
  hive_config_dir = hive_server_conf_dir
else:
  hive_config_dir = hive_client_conf_dir

#hive-site
hive_database_name = config['configurations']['hive-env']['hive_database_name']
hive_database = config['configurations']['hive-env']['hive_database']

#Starting hiveserver2
start_hiveserver2_script = 'startHiveserver2.sh.j2'

##Starting metastore
start_metastore_script = 'startMetastore.sh'
hive_metastore_pid = status_params.hive_metastore_pid
java_share_dir = '/usr/share/java'
driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")

hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
user_group = config['configurations']['cluster-env']['user_group']
artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")

target = format("{hive_lib}/{jdbc_jar_name}")
jdk_location = config['hostLevelParams']['jdk_location']
driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")

start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
start_metastore_path = format("{tmp_dir}/start_metastore_script")

hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
java64_home = config['hostLevelParams']['java_home']

##### MYSQL
db_name = config['configurations']['hive-env']['hive_database_name']
mysql_group = 'mysql'
mysql_host = config['clusterHostInfo']['hive_mysql_host']

mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")

######## Metastore Schema
if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  init_metastore_schema = False
else:
  init_metastore_schema = True
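# Assumed behavior, based on the flag name: on stacks 2.1.0.0 and newer the service
# scripts use init_metastore_schema to run an explicit schema initialization (Hive's
# schematool) during metastore setup; on older stacks schema creation is left to the
# metastore itself. The actual call lives in the service scripts, not in this file.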
########## HCAT
hcat_dbroot = hcat_lib

hcat_user = config['configurations']['hive-env']['hcat_user']
webhcat_user = config['configurations']['hive-env']['webhcat_user']

hcat_pid_dir = status_params.hcat_pid_dir
hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']

hcat_env_sh_template = config['configurations']['hcat-env']['content']

#hive-log4j.properties.template
if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
  log4j_props = config['configurations']['hive-log4j']['content']
else:
  log4j_props = None

#webhcat-log4j.properties.template
if (('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j'])):
  log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
else:
  log4j_webhcat_props = None

#hive-exec-log4j.properties.template
if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
else:
  log4j_exec_props = None

daemon_name = status_params.daemon_name
hive_env_sh_template = config['configurations']['hive-env']['content']

hive_hdfs_user_dir = format("/user/{hive_user}")
hive_hdfs_user_mode = 0700
hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]

#for create_hdfs_directory
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)

# Tez-related properties
tez_user = config['configurations']['tez-env']['tez_user']

# Tez jars
tez_local_api_jars = '/usr/lib/tez/tez*.jar'
tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
app_dir_files = {tez_local_api_jars: None}

# Tez libraries
tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)

if System.get_instance().os_family == "ubuntu":
  mysql_configname = '/etc/mysql/my.cnf'
else:
  mysql_configname = '/etc/my.cnf'

mysql_user = 'mysql'

# Hive security
hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']

mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
hive_use_existing_db = hive_database.startswith('Existing')
hive_exclude_packages = []

# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
# trying to install mysql-connector-java upon them can cause packages to conflict.
if hive_use_existing_db:
  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
else:
  if 'role' in config and config['role'] != "MYSQL_SERVER":
    hive_exclude_packages = ['mysql', 'mysql-server']
  if os.path.exists(mysql_jdbc_driver_jar):
    hive_exclude_packages.append('mysql-connector-java')
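# Illustrative outcomes of the exclusion logic above:
#   existing external database                      -> ['mysql-connector-java', 'mysql', 'mysql-server']
#   new MySQL, non-MYSQL_SERVER host, connector jar
#   already present on disk                         -> ['mysql', 'mysql-server', 'mysql-connector-java']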
########################################################
########### WebHCat related params #####################
########################################################

webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
templeton_pid_dir = status_params.hcat_pid_dir

webhcat_pid_file = status_params.webhcat_pid_file

templeton_jar = config['configurations']['webhcat-site']['templeton.jar']

webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']

webhcat_apps_dir = "/apps/webhcat"

hcat_hdfs_user_dir = format("/user/{hcat_user}")
hcat_hdfs_user_mode = 0755
webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
webhcat_hdfs_user_mode = 0755

#for create_hdfs_directory
security_param = "true" if security_enabled else "false"

import functools
#create partial functions with common arguments for every HdfsDirectory call
#to create hdfs directory we need to call params.HdfsDirectory in code
HdfsDirectory = functools.partial(
  HdfsDirectory,
  conf_dir=hadoop_conf_dir,
  hdfs_user=hdfs_user,
  security_enabled=security_enabled,
  keytab=hdfs_user_keytab,
  kinit_path_local=kinit_path_local,
  bin_dir=hadoop_bin_dir
)
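# Illustrative usage from a service script (call-site arguments assumed, not defined here):
#   import params
#   params.HdfsDirectory(params.hive_apps_whs_dir,
#                        action="create_delayed",
#                        owner=params.hive_user,
#                        mode=0777)
# Only the directory-specific arguments are supplied at the call site; the common
# keyword arguments are already bound by functools.partial above.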
# ranger host
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
user_input = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
has_ranger_admin = not len(ranger_admin_hosts) == 0

if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # setting flag value for ranger hive plugin
  enable_ranger_hive = False
  user_input = config['configurations']['ranger-hive-plugin-properties']['ranger-hive-plugin-enabled']
  if user_input.lower() == 'yes':
    enable_ranger_hive = True
  elif user_input.lower() == 'no':
    enable_ranger_hive = False

ranger_jdbc_jar_name = "mysql-connector-java.jar"
ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_jar_name}")
ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")