
AMBARI-10787. Knox,Storm Ranger plugins fails to install with non-root agent (aonishuk)

Andrew Onishuk 10 years ago
parent
commit
9d9c00fcc2

+ 2 - 2
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -325,7 +325,7 @@ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  enable_ranger_hive = (config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled'].lower() == 'yes')
+  enable_ranger_hdfs = (config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled'].lower() == 'yes')
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 
@@ -337,7 +337,7 @@ xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name']
 xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user']
 xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password']
 xa_db_host = config['configurations']['admin-properties']['db_host']
-repo_name = str(config['clusterName']) + '_hdfs'
+repo_name = str(config['clusterName']) + '_hadoop'
 
 hadoop_security_authentication = config['configurations']['core-site']['hadoop.security.authentication']
 hadoop_security_authorization = config['configurations']['core-site']['hadoop.security.authorization']

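The renamed flag evaluates the plugin-enabled property case-insensitively; a minimal standalone illustration with a hypothetical config value:

# Hypothetical config snippet; mirrors the comparison in the hunk above.
config = {'configurations': {'ranger-hdfs-plugin-properties': {'ranger-hdfs-plugin-enabled': 'Yes'}}}
enable_ranger_hdfs = (config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled'].lower() == 'yes')
print(enable_ranger_hdfs)  # True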
+ 48 - 0
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/ranger-knox-plugin-properties.xml

@@ -158,5 +158,53 @@
     <value>changeit</value>
     <description></description>
   </property>
+  
+  <property>
+    <name>POLICY_MGR_URL</name>
+    <value>{{policymgr_mgr_url}}</value>
+    <description>Policy Manager url</description>    
+  </property> 
+  
+  <property>
+    <name>SQL_CONNECTOR_JAR</name>
+    <value>{{sql_connector_jar}}</value>
+    <description>Location of DB client library (please check the location of the jar file)</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.FLAVOUR</name>
+    <value>{{xa_audit_db_flavor}}</value>
+    <description>The database type to be used (mysql/oracle)</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.DATABASE_NAME</name>
+    <value>{{xa_audit_db_name}}</value>
+    <description>Audit database name</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.USER_NAME</name>
+    <value>{{xa_audit_db_user}}</value>
+    <description>Audit database user</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.PASSWORD</name>
+    <value>{{xa_audit_db_password}}</value>
+    <description>Audit database password</description>    
+  </property>
+  
+  <property>
+    <name>XAAUDIT.DB.HOSTNAME</name>
+    <value>{{xa_db_host}}</value>
+    <description>Audit database hostname</description>    
+  </property>
+  
+  <property>
+    <name>REPOSITORY_NAME</name>
+    <value>{{repo_name}}</value>
+    <description>Ranger repository name</description>    
+  </property>
 
 </configuration>

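The {{...}} values declared above are placeholders that are filled in from the matching variables in params.py when the plugin properties are materialized; a minimal standalone sketch of that substitution, assuming a Jinja2-style renderer (Ambari's own templating machinery is not shown in this diff and may differ):

from jinja2 import Template

# Hypothetical values standing in for the params.py variables.
params = {
    "policymgr_mgr_url": "http://ranger-admin.example.com:6080",
    "xa_audit_db_flavor": "MYSQL",
    "xa_audit_db_name": "ranger_audit",
    "repo_name": "c1_knox",
}

raw_value = "{{policymgr_mgr_url}}"          # as declared for POLICY_MGR_URL above
print(Template(raw_value).render(**params))  # -> http://ranger-admin.example.com:6080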
+ 75 - 70
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py

@@ -23,6 +23,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management import *
 import status_params
+import json
 from ambari_commons import OSCheck
 
 if OSCheck.is_windows_family():
@@ -107,8 +108,7 @@ has_oozie = not oozie_server_host == None
 oozie_server_port = "11000"
 
 if has_oozie:
-    if 'oozie.base.url' in config['configurations']['oozie-site']:
-        oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
+  oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
 
 # Knox managed properties
 knox_managed_pid_symlink= "/usr/hdp/current/knox-server/pids"
@@ -141,80 +141,85 @@ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  # Setting Flag value for ranger hbase plugin
-  enable_ranger_knox = False
-  ranger_plugin_enable = default("/configurations/ranger-knox-plugin-properties/ranger-knox-plugin-enabled", "no")
-  if ranger_plugin_enable.lower() == 'yes':
-    enable_ranger_knox = True
-  elif ranger_plugin_enable.lower() == 'no':
-    enable_ranger_knox = False
+  enable_ranger_knox = (config['configurations']['ranger-knox-plugin-properties']['ranger-knox-plugin-enabled'].lower() == 'yes')
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 
 # ranger knox properties
-policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
-sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
-xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
-xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
-xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
-xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
-xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
+policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
+sql_connector_jar = config['configurations']['admin-properties']['SQL_CONNECTOR_JAR']
+xa_audit_db_flavor = config['configurations']['admin-properties']['DB_FLAVOR']
+xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name']
+xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user']
+xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password']
+xa_db_host = config['configurations']['admin-properties']['db_host']
 repo_name = str(config['clusterName']) + '_knox'
-db_enabled = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
-hdfs_enabled = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
-hdfs_dest_dir = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
-hdfs_buffer_dir = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
-hdfs_archive_dir = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
-hdfs_dest_file = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
-hdfs_dest_flush_int_sec = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
-hdfs_dest_rollover_int_sec = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
-hdfs_dest_open_retry_int_sec = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
-hdfs_buffer_file = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
-hdfs_buffer_flush_int_sec = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
-hdfs_buffer_rollover_int_sec = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
-hdfs_archive_max_file_count = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
-ssl_keystore_file = default("/configurations/ranger-knox-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
-ssl_keystore_password = default("/configurations/ranger-knox-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
-ssl_truststore_file = default("/configurations/ranger-knox-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
-ssl_truststore_password = default("/configurations/ranger-knox-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
-
-knox_home = default("/configurations/ranger-knox-plugin-properties/KNOX_HOME", "/usr/hdp/current/knox-server")
-common_name_for_certificate = default("/configurations/ranger-knox-plugin-properties/common.name.for.certificate", "-")
-
-repo_config_username = default("/configurations/ranger-knox-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hbase")
-repo_config_password = default("/configurations/ranger-knox-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hbase")
-
-admin_uname = default("/configurations/ranger-env/admin_username", "admin")
-admin_password = default("/configurations/ranger-env/admin_password", "admin")
-admin_uname_password = format("{admin_uname}:{admin_password}")
-
-ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
-ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
-policy_user = default("/configurations/ranger-knox-plugin-properties/policy_user", "ambari-qa")
+
+knox_home = config['configurations']['ranger-knox-plugin-properties']['KNOX_HOME']
+common_name_for_certificate = config['configurations']['ranger-knox-plugin-properties']['common.name.for.certificate']
+
+repo_config_username = config['configurations']['ranger-knox-plugin-properties']['REPOSITORY_CONFIG_USERNAME']
+repo_config_password = config['configurations']['ranger-knox-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']
+
+ranger_env = config['configurations']['ranger-env']
+ranger_plugin_properties = config['configurations']['ranger-knox-plugin-properties']
+policy_user = config['configurations']['ranger-knox-plugin-properties']['policy_user']
 
 #For curl command in ranger plugin to get db connector
 jdk_location = config['hostLevelParams']['jdk_location']
 java_share_dir = '/usr/share/java'
-if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-  jdbc_jar_name = "mysql-connector-java.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
-  jdbc_jar_name = "ojdbc6.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'postgres':
-  jdbc_jar_name = "postgresql.jar"
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'sqlserver':
-  jdbc_jar_name = "sqljdbc4.jar"
-  jdbc_symlink_name = "mssql-jdbc-driver.jar"
-
-downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
-
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.3') >= 0:
-  solr_enabled = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.SOLR.IS_ENABLED", "false")
-  solr_max_queue_size = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.SOLR.MAX_QUEUE_SIZE", "1")
-  solr_max_flush_interval = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS", "1000")
-  solr_url = default("/configurations/ranger-knox-plugin-properties/XAAUDIT.SOLR.SOLR_URL", "http://localhost:6083/solr/ranger_audits")
+if has_ranger_admin:
+  if xa_audit_db_flavor.lower() == 'mysql':
+    jdbc_symlink_name = "mysql-jdbc-driver.jar"
+    jdbc_jar_name = "mysql-connector-java.jar"
+  elif xa_audit_db_flavor.lower() == 'oracle':
+    jdbc_jar_name = "ojdbc6.jar"
+    jdbc_symlink_name = "oracle-jdbc-driver.jar"
+  elif xa_audit_db_flavor.lower() == 'postgres':
+    jdbc_jar_name = "postgresql.jar"
+    jdbc_symlink_name = "postgres-jdbc-driver.jar"
+  elif xa_audit_db_flavor.lower() == 'sqlserver':
+    jdbc_jar_name = "sqljdbc4.jar"
+    jdbc_symlink_name = "mssql-jdbc-driver.jar"
+
+  downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
+  
+  driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
+  driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+knox_ranger_plugin_config = {
+  'username': repo_config_username,
+  'password': repo_config_password,
+  'knox.url': format("https://{knox_host_name}:{knox_host_port}/gateway/admin/api/v1/topologies"),
+  'commonNameForCertificate': common_name_for_certificate
+}
+
+knox_ranger_plugin_repo = {
+  'isActive': 'true',
+  'config': json.dumps(knox_ranger_plugin_config),
+  'description': 'knox repo',
+  'name': repo_name,
+  'repositoryType': 'knox',
+  'assetType': '5',
+}
+
+def knox_repo_properties():
+  import params
+
+  config_dict = dict()
+  config_dict['username'] = params.repo_config_username
+  config_dict['password'] = params.repo_config_password
+  config_dict['knox.url'] = 'https://' + params.knox_host_name + ':' + str(params.knox_host_port) +'/gateway/admin/api/v1/topologies'
+  config_dict['commonNameForCertificate'] = params.common_name_for_certificate
+
+  repo= dict()
+  repo['isActive'] = "true"
+  repo['config'] = json.dumps(config_dict)
+  repo['description'] = "knox repo"
+  repo['name'] = params.repo_name
+  repo['repositoryType'] = "knox"
+  repo['assetType'] = '5'
+
+  data = json.dumps(repo)
+
+  return data

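For illustration, the knox_ranger_plugin_repo dict above is what gets serialized and sent to Ranger admin when the repository is created; a standalone sketch with hypothetical values (hostnames and credentials are made up, and knox_host_name/knox_host_port are assumed to be defined elsewhere in params.py):

import json

# Hypothetical stand-ins for the params above.
repo_config_username = "admin"
repo_config_password = "admin-password"
knox_host_name = "knox1.example.com"
knox_host_port = 8443
repo_name = "c1_knox"
common_name_for_certificate = "-"

knox_ranger_plugin_config = {
  'username': repo_config_username,
  'password': repo_config_password,
  'knox.url': "https://{0}:{1}/gateway/admin/api/v1/topologies".format(knox_host_name, knox_host_port),
  'commonNameForCertificate': common_name_for_certificate
}

knox_ranger_plugin_repo = {
  'isActive': 'true',
  'config': json.dumps(knox_ranger_plugin_config),
  'description': 'knox repo',
  'name': repo_name,
  'repositoryType': 'knox',
  'assetType': '5',
}

print(json.dumps(knox_ranger_plugin_repo, indent=2))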
+ 11 - 177
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py

@@ -8,7 +8,7 @@ to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
 
-  http://www.apache.org/licenses/LICENSE-2.0
+    http://www.apache.org/licenses/LICENSE-2.0
 
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,185 +17,19 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-import sys
-import fileinput
-import subprocess
-import json
-import re
-import os
 from resource_management import *
-from resource_management.libraries.functions.ranger_functions import Rangeradmin
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 
 def setup_ranger_knox():
   import params
-
+  
   if params.has_ranger_admin:
-    File(params.downloaded_custom_connector,
-         content = DownloadSource(params.driver_curl_source),
-    )
-
-    if not os.path.isfile(params.driver_curl_target):
-      Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target),
-              path=["/bin", "/usr/bin/"],
-              sudo=True)
-
-    try:
-      command = 'hdp-select status knox-server'
-      return_code, hdp_output = shell.call(command, timeout=20)
-    except Exception, e:
-      Logger.error(str(e))
-      raise Fail('Unable to execute hdp-select command to retrieve the version.')
-
-    if return_code != 0:
-      raise Fail('Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
-
-    hdp_version = re.sub('knox-server - ', '', hdp_output).strip()
-    match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
-
-    if match is None:
-      raise Fail('Failed to get extracted version')
-
-    file_path = '/usr/hdp/'+ hdp_version +'/ranger-knox-plugin/install.properties'
-    if not os.path.isfile(file_path):
-      raise Fail('Ranger Knox plugin install.properties file does not exist at {0}'.format(file_path))
-
-    ranger_knox_dict = ranger_knox_properties()
-    knox_repo_data = knox_repo_properties()
-
-    write_properties_to_file(file_path, ranger_knox_dict)
-
-    if params.enable_ranger_knox:
-      cmd = format('cd /usr/hdp/{hdp_version}/ranger-knox-plugin/ && sh enable-knox-plugin.sh')
-      ranger_adm_obj = Rangeradmin(url=ranger_knox_dict['POLICY_MGR_URL'])
-      response_code, response_recieved = ranger_adm_obj.check_ranger_login_urllib2(ranger_knox_dict['POLICY_MGR_URL'] + '/login.jsp', 'test:test')
-
-      if response_code is not None and response_code == 200:
-        ambari_ranger_admin, ambari_ranger_password = ranger_adm_obj.create_ambari_admin_user(params.ambari_ranger_admin, params.ambari_ranger_password, params.admin_uname_password)
-        ambari_username_password_for_ranger = ambari_ranger_admin + ':' + ambari_ranger_password
-        if ambari_ranger_admin != '' and ambari_ranger_password != '':
-          repo = ranger_adm_obj.get_repository_by_name_urllib2(ranger_knox_dict['REPOSITORY_NAME'], 'knox', 'true', ambari_username_password_for_ranger)
-          if repo and repo['name'] == ranger_knox_dict['REPOSITORY_NAME']:
-            Logger.info('Knox Repository exist')
-          else:
-            response = ranger_adm_obj.create_repository_urllib2(knox_repo_data, ambari_username_password_for_ranger, params.policy_user)
-            if response is not None:
-              Logger.info('Knox Repository created in Ranger Admin')
-            else:
-              Logger.info('Knox Repository creation failed in Ranger Admin')
-        else:
-          Logger.info('Ambari admin username and password are blank ')
-      else:
-        Logger.info('Ranger service is not started on given host')
-    else:
-      cmd = format('cd /usr/hdp/{hdp_version}/ranger-knox-plugin/ && sh disable-knox-plugin.sh')
-
-    Execute(cmd, environment={'JAVA_HOME': params.java_home}, logoutput=True)
-  else:
-    Logger.info('Ranger admin not installed') 
-
-
-def write_properties_to_file(file_path, value):
-  for key in value:
-    modify_config(file_path, key, value[key])
-
-
-def modify_config(filepath, variable, setting):
-  var_found = False
-  already_set = False
-  V=str(variable)
-  S=str(setting)
-  # use quotes if setting has spaces #
-  if ' ' in S:
-    S = '%s' % S
-
-  for line in fileinput.input(filepath, inplace = 1):
-    # process lines that look like config settings #
-    if not line.lstrip(' ').startswith('#') and '=' in line:
-      _infile_var = str(line.split('=')[0].rstrip(' '))
-      _infile_set = str(line.split('=')[1].lstrip(' ').rstrip())
-      # only change the first matching occurrence #
-      if var_found == False and _infile_var.rstrip(' ') == V:
-        var_found = True
-        # don't change it if it is already set #
-        if _infile_set.lstrip(' ') == S:
-          already_set = True
-        else:
-          line = "%s=%s\n" % (V, S)
-
-    sys.stdout.write(line)
-
-  # Append the variable if it wasn't found #
-  if not var_found:
-    with open(filepath, "a") as f:
-      f.write("%s=%s\n" % (V, S))
-  elif already_set == True:
-    pass
+    setup_ranger_plugin('knox-server', 'knox', 
+                        params.downloaded_custom_connector, params.driver_curl_source,
+                        params.driver_curl_target, params.java_home,
+                        params.repo_name, params.knox_ranger_plugin_repo,
+                        params.ranger_env, params.ranger_plugin_properties,
+                        params.policy_user, params.policymgr_mgr_url,
+                        params.enable_ranger_knox
+    )                 
   else:
-    pass
-
-  return
-
-def ranger_knox_properties():
-  import params
-
-  ranger_knox_properties = dict()
-
-  ranger_knox_properties['POLICY_MGR_URL'] = params.policymgr_mgr_url
-  ranger_knox_properties['SQL_CONNECTOR_JAR'] = params.sql_connector_jar
-  ranger_knox_properties['XAAUDIT.DB.FLAVOUR'] = params.xa_audit_db_flavor
-  ranger_knox_properties['XAAUDIT.DB.DATABASE_NAME'] = params.xa_audit_db_name
-  ranger_knox_properties['XAAUDIT.DB.USER_NAME'] = params.xa_audit_db_user
-  ranger_knox_properties['XAAUDIT.DB.PASSWORD'] = params.xa_audit_db_password
-  ranger_knox_properties['XAAUDIT.DB.HOSTNAME'] = params.xa_db_host
-  ranger_knox_properties['REPOSITORY_NAME'] = params.repo_name
-  ranger_knox_properties['XAAUDIT.DB.IS_ENABLED'] = params.db_enabled
-  ranger_knox_properties['KNOX_HOME'] = params.knox_home
-
-  ranger_knox_properties['XAAUDIT.HDFS.IS_ENABLED'] = params.hdfs_enabled
-  ranger_knox_properties['XAAUDIT.HDFS.DESTINATION_DIRECTORY'] = params.hdfs_dest_dir
-  ranger_knox_properties['XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY'] = params.hdfs_buffer_dir
-  ranger_knox_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY'] = params.hdfs_archive_dir
-  ranger_knox_properties['XAAUDIT.HDFS.DESTINTATION_FILE'] = params.hdfs_dest_file
-  ranger_knox_properties['XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS'] = params.hdfs_dest_flush_int_sec
-  ranger_knox_properties['XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_dest_rollover_int_sec
-  ranger_knox_properties['XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS'] = params.hdfs_dest_open_retry_int_sec
-  ranger_knox_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FILE'] = params.hdfs_buffer_file
-  ranger_knox_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS'] = params.hdfs_buffer_flush_int_sec
-  ranger_knox_properties['XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_buffer_rollover_int_sec
-  ranger_knox_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT'] = params.hdfs_archive_max_file_count
-
-  ranger_knox_properties['SSL_KEYSTORE_FILE_PATH'] = params.ssl_keystore_file
-  ranger_knox_properties['SSL_KEYSTORE_PASSWORD'] = params.ssl_keystore_password
-  ranger_knox_properties['SSL_TRUSTSTORE_FILE_PATH'] = params.ssl_truststore_file
-  ranger_knox_properties['SSL_TRUSTSTORE_PASSWORD'] = params.ssl_truststore_password
-
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.3') >= 0:
-    ranger_knox_properties['XAAUDIT.SOLR.IS_ENABLED'] = str(params.solr_enabled).lower()
-    ranger_knox_properties['XAAUDIT.SOLR.MAX_QUEUE_SIZE'] = params.solr_max_queue_size
-    ranger_knox_properties['XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS'] = params.solr_max_flush_interval
-    ranger_knox_properties['XAAUDIT.SOLR.SOLR_URL'] = params.solr_url
-  
-  return ranger_knox_properties  
-
-def knox_repo_properties():
-  import params
-
-  config_dict = dict()
-  config_dict['username'] = params.repo_config_username
-  config_dict['password'] = params.repo_config_password
-  config_dict['knox.url'] = 'https://' + params.knox_host_name + ':' + str(params.knox_host_port) +'/gateway/admin/api/v1/topologies'
-  config_dict['commonNameForCertificate'] = params.common_name_for_certificate
-
-  repo= dict()
-  repo['isActive'] = "true"
-  repo['config'] = json.dumps(config_dict)
-  repo['description'] = "knox repo"
-  repo['name'] = params.repo_name
-  repo['repositoryType'] = "knox"
-  repo['assetType'] = '5'
-
-  data = json.dumps(repo)
-
-  return data
+    Logger.info('Ranger admin not installed')

+ 53 - 66
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py

@@ -24,6 +24,7 @@ from resource_management.libraries.script import Script
 from resource_management.libraries.functions import default, format
 import status_params
 import re
+import json
 
 def get_bare_principal(normalized_principal_name):
   """
@@ -93,6 +94,8 @@ else:
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
+storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", [])
+
 if security_enabled:
   _hostname_lowercase = config['hostname'].lower()
   _storm_principal_name = config['configurations']['storm-env']['storm_principal_name']
@@ -102,7 +105,6 @@ if security_enabled:
   if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
     storm_ui_keytab_path = config['configurations']['storm-env']['storm_ui_keytab']
     _storm_ui_jaas_principal_name = config['configurations']['storm-env']['storm_ui_principal_name']
-    storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", [])
     storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace('_HOST',_hostname_lowercase)
 
     storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)
@@ -131,80 +133,65 @@ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  # setting flag value for ranger hive plugin
-  enable_ranger_storm = False
-  ranger_plugin_enable = default("/configurations/ranger-storm-plugin-properties/ranger-storm-plugin-enabled", "no")
-  if ranger_plugin_enable.lower() == 'yes':
-    enable_ranger_storm = True
-  elif ranger_plugin_enable.lower() == 'no':
-    enable_ranger_storm = False
+  enable_ranger_storm = (config['configurations']['ranger-storm-plugin-properties']['ranger-storm-plugin-enabled'].lower() == 'yes')
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 
 #ranger storm properties
-policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
-sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
-xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
-xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
-xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
-xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
-xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
+policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
+sql_connector_jar = config['configurations']['admin-properties']['SQL_CONNECTOR_JAR']
+xa_audit_db_flavor = config['configurations']['admin-properties']['DB_FLAVOR']
+xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name']
+xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user']
+xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password']
+xa_db_host = config['configurations']['admin-properties']['db_host']
 repo_name = str(config['clusterName']) + '_storm'
-db_enabled = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
-hdfs_enabled = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
-hdfs_dest_dir = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
-hdfs_buffer_dir = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
-hdfs_archive_dir = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
-hdfs_dest_file = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
-hdfs_dest_flush_int_sec = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
-hdfs_dest_rollover_int_sec = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
-hdfs_dest_open_retry_int_sec = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
-hdfs_buffer_file = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
-hdfs_buffer_flush_int_sec = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
-hdfs_buffer_rollover_int_sec = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
-hdfs_archive_max_file_count = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
-ssl_keystore_file = default("/configurations/ranger-storm-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
-ssl_keystore_password = default("/configurations/ranger-storm-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
-ssl_truststore_file = default("/configurations/ranger-storm-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
-ssl_truststore_password = default("/configurations/ranger-storm-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
-
-common_name_for_certificate = default("/configurations/ranger-storm-plugin-properties/common.name.for.certificate", "-")
-
-repo_config_username = default("/configurations/ranger-storm-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hadoop")
-repo_config_password = default("/configurations/ranger-storm-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hadoop")
+
+common_name_for_certificate = config['configurations']['ranger-storm-plugin-properties']['common.name.for.certificate']
+
 storm_ui_port = config['configurations']['storm-site']['ui.port']
 
-admin_uname = default("/configurations/ranger-env/admin_username", "admin")
-admin_password = default("/configurations/ranger-env/admin_password", "admin")
-admin_uname_password = format("{admin_uname}:{admin_password}")
+repo_config_username = config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_USERNAME']
+repo_config_password = config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']
 
-ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
-ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
-policy_user = default("/configurations/ranger-storm-plugin-properties/policy_user", "storm")
+ranger_env = config['configurations']['ranger-env']
+ranger_plugin_properties = config['configurations']['ranger-storm-plugin-properties']
+policy_user = config['configurations']['ranger-storm-plugin-properties']['policy_user']
 
 #For curl command in ranger plugin to get db connector
 jdk_location = config['hostLevelParams']['jdk_location']
 java_share_dir = '/usr/share/java'
-if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-  jdbc_jar_name = "mysql-connector-java.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
-  jdbc_jar_name = "ojdbc6.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'postgres':
-  jdbc_jar_name = "postgresql.jar"
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'sqlserver':
-  jdbc_jar_name = "sqljdbc4.jar"
-  jdbc_symlink_name = "mssql-jdbc-driver.jar"
-
-downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
-
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.3') >= 0:
-  solr_enabled = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.SOLR.IS_ENABLED", "false")
-  solr_max_queue_size = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.SOLR.MAX_QUEUE_SIZE", "1")
-  solr_max_flush_interval = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS", "1000")
-  solr_url = default("/configurations/ranger-storm-plugin-properties/XAAUDIT.SOLR.SOLR_URL", "http://localhost:6083/solr/ranger_audits")
+if has_ranger_admin:
+  if xa_audit_db_flavor.lower() == 'mysql':
+    jdbc_symlink_name = "mysql-jdbc-driver.jar"
+    jdbc_jar_name = "mysql-connector-java.jar"
+  elif xa_audit_db_flavor.lower() == 'oracle':
+    jdbc_jar_name = "ojdbc6.jar"
+    jdbc_symlink_name = "oracle-jdbc-driver.jar"
+  elif xa_audit_db_flavor.lower() == 'postgres':
+    jdbc_jar_name = "postgresql.jar"
+    jdbc_symlink_name = "postgres-jdbc-driver.jar"
+  elif xa_audit_db_flavor.lower() == 'sqlserver':
+    jdbc_jar_name = "sqljdbc4.jar"
+    jdbc_symlink_name = "mssql-jdbc-driver.jar"
+
+  downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
+  
+  driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
+  driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+storm_ranger_plugin_config = {
+  'username': repo_config_username,
+  'password': repo_config_password,
+  'nimbus.url': 'http://' + storm_ui_host[0].lower() + ':' + str(storm_ui_port),
+  'commonNameForCertificate': common_name_for_certificate
+}
+
+storm_ranger_plugin_repo = {
+  'isActive': 'true',
+  'config': json.dumps(storm_ranger_plugin_config),
+  'description': 'storm repo',
+  'name': repo_name,
+  'repositoryType': 'storm',
+  'assetType': '6'
+}

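The DB_FLAVOUR branch above selects a connector jar and a download symlink per database type; an equivalent lookup, sketched as standalone Python with hypothetical values in place of the Ambari format() helper:

# Mapping equivalent to the if/elif chain above.
flavor_to_jars = {
    "mysql":     ("mysql-connector-java.jar", "mysql-jdbc-driver.jar"),
    "oracle":    ("ojdbc6.jar",               "oracle-jdbc-driver.jar"),
    "postgres":  ("postgresql.jar",           "postgres-jdbc-driver.jar"),
    "sqlserver": ("sqljdbc4.jar",             "mssql-jdbc-driver.jar"),
}

xa_audit_db_flavor = "MYSQL"                                 # hypothetical DB_FLAVOR value
jdbc_jar_name, jdbc_symlink_name = flavor_to_jars[xa_audit_db_flavor.lower()]

jdk_location = "http://ambari.example.com:8080/resources"    # hypothetical
tmp_dir = "/var/lib/ambari-agent/tmp"                        # hypothetical
downloaded_custom_connector = "{0}/{1}".format(tmp_dir, jdbc_jar_name)
driver_curl_source = "{0}/{1}".format(jdk_location, jdbc_symlink_name)
driver_curl_target = "/usr/share/java/{0}".format(jdbc_jar_name)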
+ 11 - 178
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py

@@ -8,7 +8,7 @@ to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
 
-  http://www.apache.org/licenses/LICENSE-2.0
+    http://www.apache.org/licenses/LICENSE-2.0
 
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,186 +17,19 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-import sys
-import fileinput
-import subprocess
-import json
-import re
-import os
 from resource_management import *
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.ranger_functions import Rangeradmin
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 
 def setup_ranger_storm():
   import params
-
+  
   if params.has_ranger_admin and params.security_enabled:
-    File(params.downloaded_custom_connector,
-         content = DownloadSource(params.driver_curl_source)
-    )
-
-    if not os.path.isfile(params.driver_curl_target):
-      Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target),
-              path=["/bin", "/usr/bin/"],
-              sudo=True)
-
-    try:
-      command = 'hdp-select status storm-nimbus'
-      return_code, hdp_output = shell.call(command)
-    except Exception, e:
-      Logger.error(str(e))
-      raise Fail('Unable to execute hdp-select command to retrieve the version.')
-
-    if return_code != 0:
-      raise Fail('Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
-
-    hdp_version = re.sub('storm-nimbus - ', '', hdp_output).strip()
-    match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
-
-    if match is None:
-      raise Fail('Failed to get extracted version')
-
-    file_path = '/usr/hdp/'+ hdp_version +'/ranger-storm-plugin/install.properties'
-    if not os.path.isfile(file_path):
-      raise Fail('Ranger Storm plugin install.properties file does not exist at {0}'.format(file_path))
-
-    ranger_storm_dict = ranger_storm_properties()
-    storm_repo_data = storm_repo_properties()        
-
-    write_properties_to_file(file_path, ranger_storm_dict)
-
-    if params.enable_ranger_storm:            
-      cmd = format('cd /usr/hdp/{hdp_version}/ranger-storm-plugin/ && sh enable-storm-plugin.sh')
-      ranger_adm_obj = Rangeradmin(url=ranger_storm_dict['POLICY_MGR_URL'])
-      response_code, response_recieved = ranger_adm_obj.check_ranger_login_urllib2(ranger_storm_dict['POLICY_MGR_URL'] + '/login.jsp', 'test:test')
-
-      if response_code is not None and response_code == 200:
-        ambari_ranger_admin, ambari_ranger_password = ranger_adm_obj.create_ambari_admin_user(params.ambari_ranger_admin, params.ambari_ranger_password, params.admin_uname_password)
-        ambari_username_password_for_ranger = ambari_ranger_admin + ':' + ambari_ranger_password
-        if ambari_ranger_admin != '' and ambari_ranger_password != '':
-          repo = ranger_adm_obj.get_repository_by_name_urllib2(ranger_storm_dict['REPOSITORY_NAME'], 'storm', 'true', ambari_username_password_for_ranger)
-          if repo and repo['name'] == ranger_storm_dict['REPOSITORY_NAME']:
-            Logger.info('STORM Repository exist')
-          else:
-            response = ranger_adm_obj.create_repository_urllib2(storm_repo_data, ambari_username_password_for_ranger, params.policy_user)
-            if response is not None:
-              Logger.info('STORM Repository created in Ranger Admin')
-            else:
-              Logger.info('STORM Repository creation failed in Ranger Admin')
-        else:
-          Logger.info('Ambari admin username and password are blank ')
-      else:
-        Logger.info('Ranger service is not started on given host')                                        
-    else:
-      cmd = format('cd /usr/hdp/{hdp_version}/ranger-storm-plugin/ && sh disable-storm-plugin.sh')
-
-    Execute(cmd, environment={'JAVA_HOME': params.java64_home}, logoutput=True)            
+    setup_ranger_plugin('storm-nimbus', 'storm', 
+                        params.downloaded_custom_connector, params.driver_curl_source,
+                        params.driver_curl_target, params.java64_home,
+                        params.repo_name, params.storm_ranger_plugin_repo,
+                        params.ranger_env, params.ranger_plugin_properties,
+                        params.policy_user, params.policymgr_mgr_url,
+                        params.enable_ranger_storm
+    )                 
   else:
-    Logger.info('Ranger admin not installed or security is not enabled')
-
-
-def write_properties_to_file(file_path, value):
-  for key in value:
-    modify_config(file_path, key, value[key])
-
-
-def modify_config(filepath, variable, setting):
-  var_found = False
-  already_set = False
-  V=str(variable)
-  S=str(setting)
-  # use quotes if setting has spaces #
-  if ' ' in S:
-    S = '%s' % S
-
-  for line in fileinput.input(filepath, inplace = 1):
-    # process lines that look like config settings #
-    if not line.lstrip(' ').startswith('#') and '=' in line:
-      _infile_var = str(line.split('=')[0].rstrip(' '))
-      _infile_set = str(line.split('=')[1].lstrip(' ').rstrip())
-      # only change the first matching occurrence #
-      if var_found == False and _infile_var.rstrip(' ') == V:
-        var_found = True
-        # don't change it if it is already set #
-        if _infile_set.lstrip(' ') == S:
-          already_set = True
-        else:
-          line = "%s=%s\n" % (V, S)
-
-    sys.stdout.write(line)
-
-  # Append the variable if it wasn't found #
-  if not var_found:
-    with open(filepath, "a") as f:
-      f.write("%s=%s\n" % (V, S))
-  elif already_set == True:
-    pass
-  else:
-    pass
-
-  return
-
-def ranger_storm_properties():
-  import params
-
-  ranger_storm_properties = dict()
-
-  ranger_storm_properties['POLICY_MGR_URL'] = params.policymgr_mgr_url
-  ranger_storm_properties['SQL_CONNECTOR_JAR'] = params.sql_connector_jar
-  ranger_storm_properties['XAAUDIT.DB.FLAVOUR'] = params.xa_audit_db_flavor
-  ranger_storm_properties['XAAUDIT.DB.DATABASE_NAME'] = params.xa_audit_db_name
-  ranger_storm_properties['XAAUDIT.DB.USER_NAME'] = params.xa_audit_db_user
-  ranger_storm_properties['XAAUDIT.DB.PASSWORD'] = params.xa_audit_db_password
-  ranger_storm_properties['XAAUDIT.DB.HOSTNAME'] = params.xa_db_host
-  ranger_storm_properties['REPOSITORY_NAME'] = params.repo_name
-  ranger_storm_properties['XAAUDIT.DB.IS_ENABLED'] = params.db_enabled
-
-  ranger_storm_properties['XAAUDIT.HDFS.IS_ENABLED'] = params.hdfs_enabled
-  ranger_storm_properties['XAAUDIT.HDFS.DESTINATION_DIRECTORY'] = params.hdfs_dest_dir
-  ranger_storm_properties['XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY'] = params.hdfs_buffer_dir
-  ranger_storm_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY'] = params.hdfs_archive_dir
-  ranger_storm_properties['XAAUDIT.HDFS.DESTINTATION_FILE'] = params.hdfs_dest_file
-  ranger_storm_properties['XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS'] = params.hdfs_dest_flush_int_sec
-  ranger_storm_properties['XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_dest_rollover_int_sec
-  ranger_storm_properties['XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS'] = params.hdfs_dest_open_retry_int_sec
-  ranger_storm_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FILE'] = params.hdfs_buffer_file
-  ranger_storm_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS'] = params.hdfs_buffer_flush_int_sec
-  ranger_storm_properties['XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_buffer_rollover_int_sec
-  ranger_storm_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT'] = params.hdfs_archive_max_file_count
-
-  ranger_storm_properties['SSL_KEYSTORE_FILE_PATH'] = params.ssl_keystore_file
-  ranger_storm_properties['SSL_KEYSTORE_PASSWORD'] = params.ssl_keystore_password
-  ranger_storm_properties['SSL_TRUSTSTORE_FILE_PATH'] = params.ssl_truststore_file
-  ranger_storm_properties['SSL_TRUSTSTORE_PASSWORD'] = params.ssl_truststore_password
-
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.3') >= 0:
-    ranger_storm_properties['XAAUDIT.SOLR.IS_ENABLED'] = str(params.solr_enabled).lower()
-    ranger_storm_properties['XAAUDIT.SOLR.MAX_QUEUE_SIZE'] = params.solr_max_queue_size
-    ranger_storm_properties['XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS'] = params.solr_max_flush_interval
-    ranger_storm_properties['XAAUDIT.SOLR.SOLR_URL'] = params.solr_url
-
-  return ranger_storm_properties
-
-
-def storm_repo_properties():
-  import params
-
-  config_dict = dict()
-  config_dict['username'] = params.repo_config_username
-  config_dict['password'] = params.repo_config_password
-  config_dict['nimbus.url'] = 'http://' + params.storm_ui_host[0].lower() + ':' + str(params.storm_ui_port)
-  config_dict['commonNameForCertificate'] = params.common_name_for_certificate
-
-  repo = dict()
-  repo['isActive'] = "true"
-  repo['config'] = json.dumps(config_dict)
-  repo['description'] = "storm repo"
-  repo['name'] = params.repo_name
-  repo['repositoryType'] = "storm"
-  repo['assetType'] = '6'
-
-  data = json.dumps(repo)
-
-  return data    
+    Logger.info('Ranger admin not installed')

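For context, setup_ranger_storm() is expected to be invoked from the Storm service scripts; a hypothetical call site (not part of this diff, names and structure assumed) showing roughly how the Nimbus script would wire it into start:

# Hypothetical sketch only; the actual nimbus.py is not part of this commit.
from resource_management import *
from setup_ranger_storm import setup_ranger_storm

class Nimbus(Script):
  def start(self, env):
    import params
    env.set_params(params)
    setup_ranger_storm()   # configure/enable the Ranger Storm plugin before starting
    # ...then start the nimbus daemon as the existing script already does

if __name__ == "__main__":
  Nimbus().execute()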
+ 48 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/ranger-storm-plugin-properties.xml

@@ -152,5 +152,53 @@
     <value>changeit</value>
     <description></description>
   </property>
+  
+  <property>
+    <name>POLICY_MGR_URL</name>
+    <value>{{policymgr_mgr_url}}</value>
+    <description>Policy Manager url</description>    
+  </property> 
+  
+  <property>
+    <name>SQL_CONNECTOR_JAR</name>
+    <value>{{sql_connector_jar}}</value>
+    <description>Location of DB client library (please check the location of the jar file)</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.FLAVOUR</name>
+    <value>{{xa_audit_db_flavor}}</value>
+    <description>The database type to be used (mysql/oracle)</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.DATABASE_NAME</name>
+    <value>{{xa_audit_db_name}}</value>
+    <description>Audit database name</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.USER_NAME</name>
+    <value>{{xa_audit_db_user}}</value>
+    <description>Audit database user</description>    
+  </property> 
+  
+  <property>
+    <name>XAAUDIT.DB.PASSWORD</name>
+    <value>{{xa_audit_db_password}}</value>
+    <description>Audit database password</description>    
+  </property>
+  
+  <property>
+    <name>XAAUDIT.DB.HOSTNAME</name>
+    <value>{{xa_db_host}}</value>
+    <description>Audit database hostname</description>    
+  </property>
+  
+  <property>
+    <name>REPOSITORY_NAME</name>
+    <value>{{repo_name}}</value>
+    <description>Ranger repository name</description>    
+  </property>
 
 </configuration>

+ 35 - 2
ambari-server/src/test/python/stacks/2.1/configs/default-storm-start.json

@@ -215,8 +215,41 @@
             "storm_pid_dir": "/var/run/storm",
             "storm_user": "storm"
         },
-        "ranger-storm-plugin-properties" : {
-            "ranger-storm-plugin-enabled":"no"
+        "ranger-storm-plugin-properties": {
+            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 
+            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 
+            "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", 
+            "common.name.for.certificate": "-", 
+            "XAAUDIT.HDFS.IS_ENABLED": "false", 
+            "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", 
+            "REPOSITORY_NAME": "{{repo_name}}", 
+            "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", 
+            "XAAUDIT.DB.IS_ENABLED": "true", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", 
+            "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", 
+            "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", 
+            "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", 
+            "XAAUDIT.SOLR.IS_ENABLED": "false", 
+            "ranger-storm-plugin-enabled": "Yes", 
+            "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", 
+            "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", 
+            "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", 
+            "policy_user": "storm", 
+            "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", 
+            "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", 
+            "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", 
+            "SSL_TRUSTSTORE_PASSWORD": "changeit", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", 
+            "REPOSITORY_CONFIG_USERNAME": "stormtestuser@EXAMPLE.COM", 
+            "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", 
+            "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", 
+            "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", 
+            "REPOSITORY_CONFIG_PASSWORD": "stormtestuser", 
+            "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
         },
         },
         "core-site": {
             "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",
+ 36 - 0
ambari-server/src/test/python/stacks/2.1/configs/default.json

@@ -231,6 +231,42 @@
             "logviewer.port": "8000",
             "topology.debug": "false"
         },
+        "ranger-storm-plugin-properties": {
+            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 
+            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 
+            "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", 
+            "common.name.for.certificate": "-", 
+            "XAAUDIT.HDFS.IS_ENABLED": "false", 
+            "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", 
+            "REPOSITORY_NAME": "{{repo_name}}", 
+            "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", 
+            "XAAUDIT.DB.IS_ENABLED": "true", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", 
+            "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", 
+            "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", 
+            "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", 
+            "XAAUDIT.SOLR.IS_ENABLED": "false", 
+            "ranger-storm-plugin-enabled": "Yes", 
+            "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", 
+            "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", 
+            "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", 
+            "policy_user": "storm", 
+            "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", 
+            "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", 
+            "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", 
+            "SSL_TRUSTSTORE_PASSWORD": "changeit", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", 
+            "REPOSITORY_CONFIG_USERNAME": "stormtestuser@EXAMPLE.COM", 
+            "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", 
+            "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", 
+            "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", 
+            "REPOSITORY_CONFIG_PASSWORD": "stormtestuser", 
+            "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+        },
         "webhcat-site": {
             "templeton.pig.path": "pig.tar.gz/pig/bin/pig", 
             "templeton.exec.timeout": "60000", 

+ 36 - 3
ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json

@@ -226,9 +226,42 @@
             "storm_keytab": "/etc/security/keytabs/storm.service.keytab", 
             "storm_ui_principal_name": "HTTP/_HOST"
         },
-        "ranger-storm-plugin-properties" : {
-            "ranger-storm-plugin-enabled":"yes"
-        }, 
+        "ranger-storm-plugin-properties": {
+            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 
+            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 
+            "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", 
+            "common.name.for.certificate": "-", 
+            "XAAUDIT.HDFS.IS_ENABLED": "false", 
+            "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", 
+            "REPOSITORY_NAME": "{{repo_name}}", 
+            "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", 
+            "XAAUDIT.DB.IS_ENABLED": "true", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", 
+            "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", 
+            "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", 
+            "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", 
+            "XAAUDIT.SOLR.IS_ENABLED": "false", 
+            "ranger-storm-plugin-enabled": "Yes", 
+            "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", 
+            "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", 
+            "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", 
+            "policy_user": "storm", 
+            "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", 
+            "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", 
+            "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", 
+            "SSL_TRUSTSTORE_PASSWORD": "changeit", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", 
+            "REPOSITORY_CONFIG_USERNAME": "stormtestuser@EXAMPLE.COM", 
+            "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", 
+            "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", 
+            "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", 
+            "REPOSITORY_CONFIG_PASSWORD": "stormtestuser", 
+            "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+        },
         "core-site": {
         "core-site": {
             "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization", 
             "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization", 
             "fs.trash.interval": "360", 
             "fs.trash.interval": "360", 

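Not part of the patch: a minimal sketch of how the new ranger-storm-plugin-properties block in secured-storm-start.json could be sanity-checked by hand. The fixture path follows the diff header above, and the block is assumed to sit under the fixture's top-level "configurations" map; the check itself is illustrative only.

import json

# Path assumption: run from the root of an ambari checkout, as in the diff header above.
FIXTURE = "ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json"

with open(FIXTURE) as f:
    command_json = json.load(f)

plugin = command_json["configurations"]["ranger-storm-plugin-properties"]

# The plugin scripts compare the enable flag case-insensitively, so "Yes" should still count as enabled.
assert plugin["ranger-storm-plugin-enabled"].lower() == "yes"

# Values meant to be filled in at deploy time should still be {{...}} placeholders in the fixture.
for key in ("POLICY_MGR_URL", "REPOSITORY_NAME", "SQL_CONNECTOR_JAR", "XAAUDIT.DB.PASSWORD"):
    value = plugin[key]
    assert value.startswith("{{") and value.endswith("}}"), (key, value)
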
+ 36 - 0
ambari-server/src/test/python/stacks/2.1/configs/secured.json

@@ -82,6 +82,42 @@
             "*.log.cleanup.frequency.minutes.retention": "hours(6)",
             "*.log.cleanup.frequency.minutes.retention": "hours(6)",
             "*.domain": "${falcon.app.type}"
             "*.domain": "${falcon.app.type}"
       },
+      "ranger-storm-plugin-properties": {
+            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 
+            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 
+            "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", 
+            "common.name.for.certificate": "-", 
+            "XAAUDIT.HDFS.IS_ENABLED": "false", 
+            "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", 
+            "REPOSITORY_NAME": "{{repo_name}}", 
+            "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", 
+            "XAAUDIT.DB.IS_ENABLED": "true", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", 
+            "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", 
+            "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", 
+            "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", 
+            "XAAUDIT.SOLR.IS_ENABLED": "false", 
+            "ranger-storm-plugin-enabled": "Yes", 
+            "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", 
+            "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", 
+            "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", 
+            "policy_user": "storm", 
+            "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", 
+            "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", 
+            "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", 
+            "SSL_TRUSTSTORE_PASSWORD": "changeit", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", 
+            "REPOSITORY_CONFIG_USERNAME": "stormtestuser@EXAMPLE.COM", 
+            "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", 
+            "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", 
+            "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", 
+            "REPOSITORY_CONFIG_PASSWORD": "stormtestuser", 
+            "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+        },
       "mapred-site": {
       "mapred-site": {
             "mapreduce.jobhistory.address": "c6402.ambari.apache.org:10020", 
             "mapreduce.jobhistory.address": "c6402.ambari.apache.org:10020", 
             "mapreduce.jobhistory.webapp.spnego-keytab-file": "/etc/security/keytabs/spnego.service.keytab", 
             "mapreduce.jobhistory.webapp.spnego-keytab-file": "/etc/security/keytabs/spnego.service.keytab", 

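Both 2.1 secured fixtures now receive the same ranger-storm-plugin-properties block. An illustrative way to confirm the two copies stay in sync; the paths are assumptions based on the diff headers, and the "configurations" nesting is assumed as above.

import json

def storm_plugin_keys(path):
    # Return the set of Ranger Storm plugin property names defined in one command fixture.
    with open(path) as f:
        return set(json.load(f)["configurations"]["ranger-storm-plugin-properties"])

base = "ambari-server/src/test/python/stacks/2.1/configs/"
keys_a = storm_plugin_keys(base + "secured-storm-start.json")
keys_b = storm_plugin_keys(base + "secured.json")

# An empty symmetric difference means neither fixture is missing a property the other defines.
assert keys_a ^ keys_b == set(), keys_a ^ keys_b
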
+ 37 - 0
ambari-server/src/test/python/stacks/2.2/configs/default.json

@@ -187,6 +187,43 @@
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop"
             "user_group": "hadoop"
         },
+        "ranger-knox-plugin-properties": {
+            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 
+            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 
+            "KNOX_HOME": "/usr/hdp/current/knox-server", 
+            "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", 
+            "common.name.for.certificate": "-", 
+            "XAAUDIT.HDFS.IS_ENABLED": "false", 
+            "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", 
+            "REPOSITORY_NAME": "{{repo_name}}", 
+            "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", 
+            "XAAUDIT.DB.IS_ENABLED": "true", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", 
+            "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", 
+            "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", 
+            "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", 
+            "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", 
+            "XAAUDIT.SOLR.IS_ENABLED": "false", 
+            "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", 
+            "ranger-knox-plugin-enabled": "Yes", 
+            "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", 
+            "policy_user": "ambari-qa", 
+            "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", 
+            "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", 
+            "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", 
+            "SSL_TRUSTSTORE_PASSWORD": "changeit", 
+            "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", 
+            "REPOSITORY_CONFIG_USERNAME": "admin", 
+            "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", 
+            "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", 
+            "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", 
+            "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", 
+            "REPOSITORY_CONFIG_PASSWORD": "admin-password", 
+            "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+        },
         "webhcat-site": {
         "webhcat-site": {
             "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
             "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
             "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
             "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",