#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management.libraries.functions.default import default
from resource_management import *
import status_params
import os

# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

stack_name = default("/hostLevelParams/stack_name", None)

# node hostname
hostname = config["hostname"]

# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)
# Hadoop params
# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # start out with client libraries
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
  hadoop_home = '/usr/hdp/current/hadoop-client'
  hive_bin = '/usr/hdp/current/hive-client/bin'
  hive_lib = '/usr/hdp/current/hive-client/lib'

  # if this is a server action, then use the server binaries; smoke tests
  # use the client binaries
  command_role = default("/role", "")
  server_role_dir_mapping = {'HIVE_SERVER': 'hive-server2',
                             'HIVE_METASTORE': 'hive-metastore'}

  if command_role in server_role_dir_mapping:
    hive_server_root = server_role_dir_mapping[command_role]
    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')

  # there are no client versions of these, use server versions directly
  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'

  hive_specific_configs_supported = True
else:
  hadoop_bin_dir = "/usr/bin"
  hadoop_home = '/usr'
  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
  hive_bin = '/usr/lib/hive/bin'
  hive_lib = '/usr/lib/hive/lib/'
  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'

  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
  # for newer versions
  else:
    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'

  hive_specific_configs_supported = False
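# Illustrative outcome of the branch above (a reading of this file, not part of
# the upstream source): on an HDP 2.2+ node whose command /role is HIVE_SERVER,
# hive_bin resolves to /usr/hdp/current/hive-server2/bin and hive_lib to
# /usr/hdp/current/hive-server2/lib, while client-only actions such as smoke
# tests keep the hive-client paths; pre-2.2 stacks fall back to /usr/lib/hive.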
hadoop_conf_dir = "/etc/hadoop/conf"
hive_conf_dir_prefix = "/etc/hive"
hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")

if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  hcat_conf_dir = '/etc/hcatalog/conf'
  config_dir = '/etc/hcatalog/conf'
# for newer versions
else:
  hcat_conf_dir = '/etc/hive-hcatalog/conf'
  config_dir = '/etc/hive-webhcat/conf'

execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']

# HACK: temporarily use dbType=azuredb while invoking schematool
if hive_metastore_db_type == "mssql":
  hive_metastore_db_type = "azuredb"
# users
hive_user = config['configurations']['hive-env']['hive_user']

# JDBC driver jar name
hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
  jdbc_jar_name = "sqljdbc4.jar"
  jdbc_symlink_name = "mssql-jdbc-driver.jar"
elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
  jdbc_jar_name = "mysql-connector-java.jar"
  jdbc_symlink_name = "mysql-jdbc-driver.jar"
elif hive_jdbc_driver == "org.postgresql.Driver":
  jdbc_jar_name = "postgresql-jdbc.jar"
  jdbc_symlink_name = "postgres-jdbc-driver.jar"
elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
  jdbc_jar_name = "ojdbc.jar"
  jdbc_symlink_name = "oracle-jdbc-driver.jar"

check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver", "com.mysql.jdbc.Driver",
                          "org.postgresql.Driver", "oracle.jdbc.driver.OracleDriver"]
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
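# Note (an assumption about the consuming service scripts, which live outside
# this file): jdbc_symlink_name is the name the JDBC driver is expected to be
# published under on the Ambari server, downloaded_custom_connector is where
# the agent stages it locally, and the jar is then copied into hive_lib so
# schematool and HiveServer2 can load it.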
# common
hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
hive_metastore_host = hive_metastore_hosts[0]
hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris'])  # "9083"
hive_var_lib = '/var/lib/hive'
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
hive_server_hosts = config['clusterHostInfo']['hive_server_host']
hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']

if hive_transport_mode.lower() == "http":
  hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
else:
  hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port', "10000")

hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']

smokeuser = config['configurations']['cluster-env']['smokeuser']
smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']

fs_root = config['configurations']['core-site']['fs.defaultFS']
security_enabled = config['configurations']['cluster-env']['security_enabled']
kinit_path_local = functions.get_kinit_path()
hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
# hive_env
hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
hive_pid_dir = status_params.hive_pid_dir
hive_pid = status_params.hive_pid

# Default conf dir for client
hive_conf_dirs_list = [hive_client_conf_dir]

if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
  hive_conf_dirs_list.append(hive_server_conf_dir)

if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
  hive_config_dir = hive_server_conf_dir
else:
  hive_config_dir = hive_client_conf_dir
# hive-site
hive_database_name = config['configurations']['hive-env']['hive_database_name']
hive_database = config['configurations']['hive-env']['hive_database']

# Starting hiveserver2
start_hiveserver2_script = 'startHiveserver2.sh.j2'

## Starting metastore
start_metastore_script = 'startMetastore.sh'
hive_metastore_pid = status_params.hive_metastore_pid
java_share_dir = '/usr/share/java'
driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")

hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
user_group = config['configurations']['cluster-env']['user_group']
artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")

target = format("{hive_lib}/{jdbc_jar_name}")
jdk_location = config['hostLevelParams']['jdk_location']
driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")

start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
start_metastore_path = format("{tmp_dir}/start_metastore_script")

hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
java64_home = config['hostLevelParams']['java_home']

##### MYSQL
db_name = config['configurations']['hive-env']['hive_database_name']
mysql_group = 'mysql'
mysql_host = config['clusterHostInfo']['hive_mysql_host']
mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")

######## Metastore Schema
if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
  init_metastore_schema = False
else:
  init_metastore_schema = True
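# init_metastore_schema gates whether the service scripts initialize the
# metastore with Hive's schematool (see the dbType hack above). A sketch of the
# kind of command this enables, assuming the standard Hive layout; the actual
# invocation is built outside this file:
#   {hive_bin}/schematool -initSchema -dbType {hive_metastore_db_type} \
#     -userName {hive_metastore_user_name} -passWord {hive_metastore_user_passwd}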
########## HCAT
hcat_dbroot = hcat_lib
hcat_user = config['configurations']['hive-env']['hcat_user']
webhcat_user = config['configurations']['hive-env']['webhcat_user']
hcat_pid_dir = status_params.hcat_pid_dir
hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
hcat_env_sh_template = config['configurations']['hcat-env']['content']

# hive-log4j.properties.template
if ('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j']):
  log4j_props = config['configurations']['hive-log4j']['content']
else:
  log4j_props = None

# webhcat-log4j.properties.template
if ('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j']):
  log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
else:
  log4j_webhcat_props = None

# hive-exec-log4j.properties.template
if ('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j']):
  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
else:
  log4j_exec_props = None

daemon_name = status_params.daemon_name
process_name = status_params.process_name
hive_env_sh_template = config['configurations']['hive-env']['content']

hive_hdfs_user_dir = format("/user/{hive_user}")
hive_hdfs_user_mode = 0700
hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
# for create_hdfs_directory
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)

# Tez-related properties
tez_user = config['configurations']['tez-env']['tez_user']

# Tez jars
tez_local_api_jars = '/usr/lib/tez/tez*.jar'
tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
app_dir_files = {tez_local_api_jars: None}

# Tez libraries
tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)

if System.get_instance().os_family == "ubuntu":
  mysql_configname = '/etc/mysql/my.cnf'
else:
  mysql_configname = '/etc/my.cnf'

mysql_user = 'mysql'

# Hive security
hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']

mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
hive_use_existing_db = hive_database.startswith('Existing')
hive_exclude_packages = []

# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java);
# trying to install mysql-connector-java on top of them can cause package conflicts.
if hive_use_existing_db:
  hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
else:
  if 'role' in config and config['role'] != "MYSQL_SERVER":
    hive_exclude_packages = ['mysql', 'mysql-server']
  if os.path.exists(mysql_jdbc_driver_jar):
    hive_exclude_packages.append('mysql-connector-java')
########################################################
########### WebHCat related params #####################
########################################################

webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
templeton_pid_dir = status_params.hcat_pid_dir
webhcat_pid_file = status_params.webhcat_pid_file

templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
webhcat_apps_dir = "/apps/webhcat"

hcat_hdfs_user_dir = format("/user/{hcat_user}")
hcat_hdfs_user_mode = 0755
webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
webhcat_hdfs_user_mode = 0755

# for create_hdfs_directory
security_param = "true" if security_enabled else "false"
import functools
# Create a partial function with common arguments for every HdfsDirectory call;
# to create an HDFS directory, service code calls params.HdfsDirectory.
HdfsDirectory = functools.partial(
  HdfsDirectory,
  conf_dir=hadoop_conf_dir,
  hdfs_user=hdfs_user,
  security_enabled=security_enabled,
  keytab=hdfs_user_keytab,
  kinit_path_local=kinit_path_local,
  bin_dir=hadoop_bin_dir
)
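# Illustrative usage of the partial above (a sketch of how calling service
# scripts are expected to use it; the real call sites live outside this file):
#   params.HdfsDirectory(params.hive_apps_whs_dir,
#                        action="create_delayed",
#                        owner=params.hive_user,
#                        mode=0777)
#   params.HdfsDirectory(None, action="create")  # flush the delayed creations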
# ranger host
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
has_ranger_admin = not len(ranger_admin_hosts) == 0

if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  # setting flag value for ranger hive plugin
  enable_ranger_hive = False
  ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
  if ranger_plugin_enable.lower() == 'yes':
    enable_ranger_hive = True
  elif ranger_plugin_enable.lower() == 'no':
    enable_ranger_hive = False

# ranger hive properties
policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
repo_name = str(config['clusterName']) + '_hive'

db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")

ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")

grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE", "true")
jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName", "")
common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")

repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")

admin_uname = default("/configurations/ranger-env/admin_username", "admin")
admin_password = default("/configurations/ranger-env/admin_password", "admin")
admin_uname_password = format("{admin_uname}:{admin_password}")

ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")

# For curl command in ranger plugin to get db connector
if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
  ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
  ranger_jdbc_jar_name = "mysql-connector-java.jar"
elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
  ranger_jdbc_jar_name = "ojdbc6.jar"
  ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"

ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")

if security_enabled:
  hive_principal = hive_server_principal.replace('_HOST', hostname.lower())
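# Illustrative use of hive_principal (an assumption about the start scripts,
# which live outside this file: on secure clusters they kinit as the Hive
# service user before launching HiveServer2):
#   {kinit_path_local} -kt {hive_server2_keytab} {hive_principal}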