params.py

#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons.os_check import OSCheck
from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management.libraries.functions.default import default
from resource_management import *
import status_params
import os

# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY

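# stack name (e.g. "HDP"), taken from the command's host-level parameters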
stack_name = default("/hostLevelParams/stack_name", None)

# node hostname
hostname = config["hostname"]

# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)

# Hadoop params
# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # start out with client libraries
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
  hadoop_home = '/usr/hdp/current/hadoop-client'
  hive_bin = '/usr/hdp/current/hive-client/bin'
  hive_lib = '/usr/hdp/current/hive-client/lib'

  # if this is a server action, then use the server binaries; smoke tests
  # use the client binaries
  command_role = default("/role", "")
  server_role_dir_mapping = {'HIVE_SERVER': 'hive-server2',
                             'HIVE_METASTORE': 'hive-metastore'}

  if command_role in server_role_dir_mapping:
    hive_server_root = server_role_dir_mapping[command_role]
    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')

  # there are no client versions of these, use server versions directly
  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'

  hive_specific_configs_supported = True
else:
  hadoop_bin_dir = "/usr/bin"
  hadoop_home = '/usr'
  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
  hive_bin = '/usr/lib/hive/bin'
  hive_lib = '/usr/lib/hive/lib/'
  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'

  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
  # for newer versions
  else:
    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'

  hive_specific_configs_supported = False
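
# configuration directories below are common to all stack versions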
hadoop_conf_dir = "/etc/hadoop/conf"
hive_conf_dir_prefix = "/etc/hive"
hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
limits_conf_dir = "/etc/security/limits.d"

if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  hcat_conf_dir = '/etc/hcatalog/conf'
  config_dir = '/etc/hcatalog/conf'
# for newer versions
else:
  hcat_conf_dir = '/etc/hive-hcatalog/conf'
  config_dir = '/etc/hive-webhcat/conf'
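
# PATH for executed commands: the existing PATH plus the hive and hadoop bin dirs resolved above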
execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']

webhcat_conf_dir = status_params.webhcat_conf_dir
hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']

# HACK Temporarily use dbType=azuredb while invoking schematool
if hive_metastore_db_type == "mssql":
  hive_metastore_db_type = "azuredb"

# users
hive_user = config['configurations']['hive-env']['hive_user']

# JDBC driver jar name
hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
  jdbc_jar_name = "sqljdbc4.jar"
  jdbc_symlink_name = "mssql-jdbc-driver.jar"
elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
  jdbc_jar_name = "mysql-connector-java.jar"
  jdbc_symlink_name = "mysql-jdbc-driver.jar"
elif hive_jdbc_driver == "org.postgresql.Driver":
  jdbc_jar_name = "postgresql-jdbc.jar"
  jdbc_symlink_name = "postgres-jdbc-driver.jar"
elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
  jdbc_jar_name = "ojdbc.jar"
  jdbc_symlink_name = "oracle-jdbc-driver.jar"
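
# helper jar under the Ambari agent's lib dir, used to verify the metastore database connection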
check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver", "com.mysql.jdbc.Driver",
                          "org.postgresql.Driver", "oracle.jdbc.driver.OracleDriver"]
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
templeton_port = config['configurations']['webhcat-site']['templeton.port']

# common
hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
hive_metastore_host = hive_metastore_hosts[0]
hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris'])  # "9083"
hive_var_lib = '/var/lib/hive'
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
hive_server_hosts = config['clusterHostInfo']['hive_server_host']
hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
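
# HiveServer2 listens on the HTTP port in http transport mode, otherwise on the binary Thrift port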
if hive_transport_mode.lower() == "http":
  hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
else:
  hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port', "10000")

hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']

smokeuser = config['configurations']['cluster-env']['smokeuser']
smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']

fs_root = config['configurations']['core-site']['fs.defaultFS']
security_enabled = config['configurations']['cluster-env']['security_enabled']
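
# locate the kinit binary for kerberized clusters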
kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']

# hive_env
hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
hive_pid_dir = status_params.hive_pid_dir
hive_pid = status_params.hive_pid

# Default conf dir for client
hive_conf_dirs_list = [hive_client_conf_dir]

if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
  hive_conf_dirs_list.append(hive_server_conf_dir)

if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
  hive_config_dir = hive_server_conf_dir
else:
  hive_config_dir = hive_client_conf_dir

# hive-site
hive_database_name = config['configurations']['hive-env']['hive_database_name']
hive_database = config['configurations']['hive-env']['hive_database']

# Starting hiveserver2
start_hiveserver2_script = 'startHiveserver2.sh.j2'

## Starting metastore
start_metastore_script = 'startMetastore.sh'
hive_metastore_pid = status_params.hive_metastore_pid

java_share_dir = '/usr/share/java'
driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")

hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
user_group = config['configurations']['cluster-env']['user_group']
artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")

target = format("{hive_lib}/{jdbc_jar_name}")

jdk_location = config['hostLevelParams']['jdk_location']
driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")

start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
start_metastore_path = format("{tmp_dir}/start_metastore_script")

hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
java64_home = config['hostLevelParams']['java_home']

##### MYSQL
db_name = config['configurations']['hive-env']['hive_database_name']
mysql_group = 'mysql'
mysql_host = config['clusterHostInfo']['hive_mysql_host']
mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")

######## Metastore Schema
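# metastore schema initialization is skipped on stacks older than HDP 2.1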
if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  init_metastore_schema = False
else:
  init_metastore_schema = True

########## HCAT
hcat_dbroot = hcat_lib

hcat_user = config['configurations']['hive-env']['hcat_user']
webhcat_user = config['configurations']['hive-env']['webhcat_user']

hcat_pid_dir = status_params.hcat_pid_dir
hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
hcat_env_sh_template = config['configurations']['hcat-env']['content']

# hive-log4j.properties.template
if ('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j']):
  log4j_props = config['configurations']['hive-log4j']['content']
else:
  log4j_props = None

# webhcat-log4j.properties.template
if ('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j']):
  log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
else:
  log4j_webhcat_props = None

# hive-exec-log4j.properties.template
if ('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j']):
  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
else:
  log4j_exec_props = None

daemon_name = status_params.daemon_name
process_name = status_params.process_name
hive_env_sh_template = config['configurations']['hive-env']['content']

hive_hdfs_user_dir = format("/user/{hive_user}")
hive_hdfs_user_mode = 0700
hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]

# for create_hdfs_directory
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)

# Tez-related properties
tez_user = config['configurations']['tez-env']['tez_user']

# Tez jars
tez_local_api_jars = '/usr/lib/tez/tez*.jar'
tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
app_dir_files = {tez_local_api_jars: None}

# Tez libraries
tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
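
# MySQL's config file lives in a different location on the Ubuntu/Debian family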
if OSCheck.is_ubuntu_family():
  mysql_configname = '/etc/mysql/my.cnf'
else:
  mysql_configname = '/etc/my.cnf'

mysql_user = 'mysql'

# Hive security
hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']

mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
hive_use_existing_db = hive_database.startswith('Existing')
hive_exclude_packages = []

# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
# trying to install mysql-connector-java upon them can cause packages to conflict.
if hive_use_existing_db:
  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
else:
  if 'role' in config and config['role'] != "MYSQL_SERVER":
    hive_exclude_packages = ['mysql', 'mysql-server']
  if os.path.exists(mysql_jdbc_driver_jar):
    hive_exclude_packages.append('mysql-connector-java')

########################################################
########### WebHCat related params #####################
########################################################
webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
templeton_pid_dir = status_params.hcat_pid_dir
webhcat_pid_file = status_params.webhcat_pid_file
templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
webhcat_apps_dir = "/apps/webhcat"

hcat_hdfs_user_dir = format("/user/{hcat_user}")
hcat_hdfs_user_mode = 0755
webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
webhcat_hdfs_user_mode = 0755

# for create_hdfs_directory
security_param = "true" if security_enabled else "false"

import functools
# create partial functions with common arguments for every HdfsDirectory call
# to create hdfs directory we need to call params.HdfsDirectory in code
HdfsDirectory = functools.partial(
  HdfsDirectory,
  conf_dir=hadoop_conf_dir,
  hdfs_user=hdfs_user,
  security_enabled=security_enabled,
  keytab=hdfs_user_keytab,
  kinit_path_local=kinit_path_local,
  bin_dir=hadoop_bin_dir
)

# ranger host
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
has_ranger_admin = not len(ranger_admin_hosts) == 0
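
# the Ranger Hive plugin flag is only evaluated on HDP 2.2+ stacks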
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # setting flag value for ranger hive plugin
  enable_ranger_hive = False
  ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
  if ranger_plugin_enable.lower() == 'yes':
    enable_ranger_hive = True
  elif ranger_plugin_enable.lower() == 'no':
    enable_ranger_hive = False

# ranger hive properties
policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
repo_name = str(config['clusterName']) + '_hive'

db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")

ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")

grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE", "true")

jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName", "")
common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")

repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")

admin_uname = default("/configurations/ranger-env/admin_username", "admin")
admin_password = default("/configurations/ranger-env/admin_password", "admin")
admin_uname_password = format("{admin_uname}:{admin_password}")

ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")

# For curl command in ranger plugin to get db connector
if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
  ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
  ranger_jdbc_jar_name = "mysql-connector-java.jar"
elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
  ranger_jdbc_jar_name = "ojdbc6.jar"
  ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"

ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
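
# substitute this host (lowercased) for the _HOST placeholder in the HiveServer2 Kerberos principal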
if security_enabled:
  hive_principal = hive_server_principal.replace('_HOST', hostname.lower())