
AMBARI-13534. Derived properties when Ranger plugin is enabled should be recommended by stack advisor. (jaimin)

Jaimin Jetly 10 years ago
parent commit 3864bc161e
17 changed files with 659 additions and 466 deletions
  1. ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-log4j.xml (+6, -0)
  2. ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/topology.xml (+6, -0)
  3. ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-site.xml (+77, -0)
  4. ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml (+12, -0)
  5. ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py (+93, -11)
  6. ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml (+15, -0)
  7. ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml (+13, -0)
  8. ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml (+24, -0)
  9. ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py (+89, -6)
  10. ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py (+120, -8)
  11. ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py (+204, -0)
  12. ambari-web/app/utils/configs/modification_handlers/hbase.js (+0, -107)
  13. ambari-web/app/utils/configs/modification_handlers/hdfs.js (+0, -55)
  14. ambari-web/app/utils/configs/modification_handlers/kafka.js (+0, -71)
  15. ambari-web/app/utils/configs/modification_handlers/knox.js (+0, -67)
  16. ambari-web/app/utils/configs/modification_handlers/storm.js (+0, -70)
  17. ambari-web/app/utils/configs/modification_handlers/yarn.js (+0, -71)

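The changes follow one pattern: each derived property declares a <depends-on> on the matching ranger-*-plugin-enabled flag, and the stack advisor recommends (or deletes) the derived value on the server side, which is why the client-side modification handlers under ambari-web are removed. A minimal sketch of that server-side lookup-and-recommend shape, using hypothetical helper names rather than the exact Ambari advisor API:

# Sketch only: derive a property from the Ranger plugin flag. The advisor reads the
# freshest flag value, preferring properties it has just recommended (configurations)
# over the cluster's persisted values (services['configurations']).
def read_property(configurations, services, config_type, prop_name):
    for source in (configurations, services['configurations']):
        if config_type in source and prop_name in source[config_type]['properties']:
            return source[config_type]['properties'][prop_name]
    return ''

def recommend_hbase_authorization(configurations, services, put_hbase_site_property):
    plugin_enabled = read_property(configurations, services,
                                   'ranger-hbase-plugin-properties',
                                   'ranger-hbase-plugin-enabled')
    if plugin_enabled and plugin_enabled.lower() == 'yes':
        # Enabling the Ranger HBase plugin implies HBase authorization must be on.
        put_hbase_site_property('hbase.security.authorization', 'true')
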
+ 6 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-log4j.xml

@@ -114,6 +114,12 @@ log4j.additivity.state.change.logger=false
     <value-attributes>
       <show-property-name>false</show-property-name>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>ranger-kafka-plugin-properties</type>
+        <name>ranger-kafka-plugin-enabled</name>
+      </property>
+    </depends-on>
   </property>
 
 </configuration>

+ 6 - 0
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/topology.xml

@@ -122,5 +122,11 @@
        <empty-value-valid>true</empty-value-valid>
        <show-property-name>false</show-property-name>
     </value-attributes>
+        <depends-on>
+            <property>
+                <type>ranger-knox-plugin-properties</type>
+                <name>ranger-knox-plugin-enabled</name>
+            </property>
+        </depends-on>
     </property>
 </configuration>

+ 77 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-site.xml

@@ -230,4 +230,81 @@
       <increment-step>0.01</increment-step>
     </value-attributes>
   </property>
+  <property>
+    <name>hbase.coprocessor.master.classes</name>
+    <value></value>
+    <description>A comma-separated list of
+      org.apache.hadoop.hbase.coprocessor.MasterObserver coprocessors that are
+      loaded by default on the active HMaster process. For any implemented
+      coprocessor methods, the listed classes will be called in order. After
+      implementing your own MasterObserver, just put it in HBase's classpath
+      and add the fully qualified class name here.
+    </description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <depends-on>
+      <property>
+        <type>hbase-site</type>
+        <name>hbase.security.authorization</name>
+      </property>
+      <property>
+        <type>ranger-hbase-plugin-properties</type>
+        <name>ranger-hbase-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
+  <property>
+    <name>hbase.coprocessor.region.classes</name>
+    <value>org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint</value>
+    <description>A comma-separated list of Coprocessors that are loaded by
+      default on all tables. For any override coprocessor method, these classes
+      will be called in order. After implementing your own Coprocessor, just put
+      it in HBase's classpath and add the fully qualified class name here.
+      A coprocessor can also be loaded on demand by setting HTableDescriptor.
+    </description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <depends-on>
+      <property>
+        <type>hbase-site</type>
+        <name>hbase.security.authorization</name>
+      </property>
+      <property>
+        <type>hbase-site</type>
+        <name>hbase.security.authentication</name>
+      </property>
+      <property>
+        <type>ranger-hbase-plugin-properties</type>
+        <name>ranger-hbase-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
+  <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+    <description> Set Authorization Method.</description>
+    <display-name>Enable Authorization</display-name>
+    <value-attributes>
+      <type>value-list</type>
+      <entries>
+        <entry>
+          <value>true</value>
+          <label>Native</label>
+        </entry>
+        <entry>
+          <value>false</value>
+          <label>Off</label>
+        </entry>
+      </entries>
+      <selection-cardinality>1</selection-cardinality>
+    </value-attributes>
+    <depends-on>
+      <property>
+        <type>ranger-hbase-plugin-properties</type>
+        <name>ranger-hbase-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
 </configuration>

+ 12 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml

@@ -109,4 +109,16 @@
     <value>{{log_dir}}</value>
     <description>Log directory for Storm.</description>
   </property>
+
+  <property>
+    <name>nimbus.authorizer</name>
+    <value>backtype.storm.security.auth.authorizer.SimpleACLAuthorizer</value>
+    <description>Class name of the authorization plugin used by Nimbus.</description>
+    <depends-on>
+      <property>
+        <type>ranger-storm-plugin-properties</type>
+        <name>ranger-storm-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
 </configuration>

+ 93 - 11
ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py

@@ -23,6 +23,7 @@ from urlparse import urlparse
 import os
 import fnmatch
 import socket
+import re
 
 class HDP22StackAdvisor(HDP21StackAdvisor):
 
@@ -569,13 +570,20 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
               ('hbase-site' in services['configurations'] and 'phoenix.functions.allowUserDefinedFunctions' in services['configurations']["hbase-site"]["properties"]):
         putHbaseSitePropertyAttributes('phoenix.functions.allowUserDefinedFunctions', 'delete', 'true')
 
-    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if 'ranger-hbase-plugin-properties' in services['configurations'] and ('ranger-hbase-plugin-enabled' in services['configurations']['ranger-hbase-plugin-properties']['properties']):
+    if "ranger-env" in services["configurations"] and "ranger-hbase-plugin-properties" in services["configurations"] and \
+        "ranger-hbase-plugin-enabled" in services["configurations"]["ranger-env"]["properties"]:
+      putHbaseRangerPluginProperty = self.putProperty(configurations, "ranger-hbase-plugin-properties", services)
+      rangerEnvHbasePluginProperty = services["configurations"]["ranger-env"]["properties"]["ranger-hbase-plugin-enabled"]
+      putHbaseRangerPluginProperty("ranger-hbase-plugin-enabled", rangerEnvHbasePluginProperty)
+
+    rangerPluginEnabled = ''
+    if 'ranger-hbase-plugin-properties' in configurations and 'ranger-hbase-plugin-enabled' in  configurations['ranger-hbase-plugin-properties']['properties']:
+      rangerPluginEnabled = configurations['ranger-hbase-plugin-properties']['properties']['ranger-hbase-plugin-enabled']
+    elif 'ranger-hbase-plugin-properties' in services['configurations'] and 'ranger-hbase-plugin-enabled' in services['configurations']['ranger-hbase-plugin-properties']['properties']:
       rangerPluginEnabled = services['configurations']['ranger-hbase-plugin-properties']['properties']['ranger-hbase-plugin-enabled']
-      if ("RANGER" in servicesList) and (rangerPluginEnabled.lower() == "Yes".lower()):
-        putHbaseSiteProperty("hbase.security.authorization", 'true')
-        putHbaseSiteProperty("hbase.coprocessor.master.classes", 'com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor')
-        putHbaseSiteProperty("hbase.coprocessor.region.classes", 'com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor')
+
+    if rangerPluginEnabled and rangerPluginEnabled.lower() == 'Yes'.lower():
+      putHbaseSiteProperty('hbase.security.authorization','true')
 
     # Recommend configs for bucket cache
     threshold = 23 # 2 Gb is reserved for other offheap memory
@@ -670,11 +678,38 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     [uniqueCoprocessorRegionClassList.append(i) for i in coprocessorRegionClassList if not uniqueCoprocessorRegionClassList.count(i)]
     putHbaseSiteProperty('hbase.coprocessor.region.classes', ','.join(set(uniqueCoprocessorRegionClassList)))
 
-    if "ranger-env" in services["configurations"] and "ranger-hbase-plugin-properties" in services["configurations"] and \
-        "ranger-hbase-plugin-enabled" in services["configurations"]["ranger-env"]["properties"]:
-      putHbaseRangerPluginProperty = self.putProperty(configurations, "ranger-hbase-plugin-properties", services)
-      rangerEnvHbasePluginProperty = services["configurations"]["ranger-env"]["properties"]["ranger-hbase-plugin-enabled"]
-      putHbaseRangerPluginProperty("ranger-hbase-plugin-enabled", rangerEnvHbasePluginProperty)
+    stackVersion = services["Versions"]["stack_version"]
+
+    if stackVersion == '2.2':
+      rangerClass = 'com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
+    else:
+      rangerClass = 'org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor'
+
+    nonRangerClass = 'org.apache.hadoop.hbase.security.access.AccessController'
+    hbaseClassConfigs = ['hbase.coprocessor.master.classes', 'hbase.coprocessor.region.classes']
+
+    for item in range(len(hbaseClassConfigs)):
+      if hbaseClassConfigs[item] in services['configurations']['hbase-site']['properties']:
+        if 'hbase-site' in configurations and hbaseClassConfigs[item] in configurations['hbase-site']['properties']:
+          coprocessorConfig = configurations['hbase-site']['properties'][hbaseClassConfigs[item]]
+        else:
+          coprocessorConfig = services['configurations']['hbase-site']['properties'][hbaseClassConfigs[item]]
+        coprocessorClasses = coprocessorConfig.split(",")
+        coprocessorClasses = filter(None, coprocessorClasses) # Removes empty string elements from array
+        if rangerPluginEnabled and rangerPluginEnabled.lower() == 'Yes'.lower():
+          if nonRangerClass in coprocessorClasses:
+            coprocessorClasses.remove(nonRangerClass)
+          if not rangerClass in coprocessorClasses:
+            coprocessorClasses.append(rangerClass)
+          putHbaseSiteProperty(hbaseClassConfigs[item], ','.join(coprocessorClasses))
+        elif rangerPluginEnabled and rangerPluginEnabled.lower() == 'No'.lower():
+          if rangerClass in coprocessorClasses:
+            coprocessorClasses.remove(rangerClass)
+            if not nonRangerClass in coprocessorClasses:
+              coprocessorClasses.append(nonRangerClass)
+            putHbaseSiteProperty(hbaseClassConfigs[item], ','.join(coprocessorClasses))
+      elif rangerPluginEnabled and rangerPluginEnabled.lower() == 'Yes'.lower():
+        putHbaseSiteProperty(hbaseClassConfigs[item], rangerClass)
 
 
   def recommendTezConfigurations(self, configurations, clusterData, services, hosts):
@@ -732,12 +767,36 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
 
 
   def recommendStormConfigurations(self, configurations, clusterData, services, hosts):
+    putStormSiteProperty = self.putProperty(configurations, "storm-site", services)
+    putStormSiteAttributes = self.putPropertyAttribute(configurations, "storm-site")
+    core_site = services["configurations"]["core-site"]["properties"]
+    stackVersion = services["Versions"]["stack_version"]
     if "ranger-env" in services["configurations"] and "ranger-storm-plugin-properties" in services["configurations"] and \
         "ranger-storm-plugin-enabled" in services["configurations"]["ranger-env"]["properties"]:
       putStormRangerPluginProperty = self.putProperty(configurations, "ranger-storm-plugin-properties", services)
       rangerEnvStormPluginProperty = services["configurations"]["ranger-env"]["properties"]["ranger-storm-plugin-enabled"]
       putStormRangerPluginProperty("ranger-storm-plugin-enabled", rangerEnvStormPluginProperty)
 
+    rangerPluginEnabled = ''
+    if 'ranger-storm-plugin-properties' in configurations and 'ranger-storm-plugin-enabled' in  configurations['ranger-storm-plugin-properties']['properties']:
+      rangerPluginEnabled = configurations['ranger-storm-plugin-properties']['properties']['ranger-storm-plugin-enabled']
+    elif 'ranger-storm-plugin-properties' in services['configurations'] and 'ranger-storm-plugin-enabled' in services['configurations']['ranger-storm-plugin-properties']['properties']:
+      rangerPluginEnabled = services['configurations']['ranger-storm-plugin-properties']['properties']['ranger-storm-plugin-enabled']
+
+    nonRangerClass = 'backtype.storm.security.auth.authorizer.SimpleACLAuthorizer'
+    if stackVersion == '2.2':
+      rangerClass = 'com.xasecure.authorization.storm.authorizer.XaSecureStormAuthorizer'
+    else:
+      rangerClass = 'org.apache.ranger.authorization.storm.authorizer.RangerStormAuthorizer'
+    # Cluster is kerberized
+    if ('hadoop.security.authentication' in core_site and core_site['hadoop.security.authentication'] == 'kerberos'):
+      if rangerPluginEnabled and (rangerPluginEnabled.lower() == 'Yes'.lower()):
+        putStormSiteProperty('nimbus.authorizer',rangerClass)
+      elif (services["configurations"]["storm-site"]["properties"]["nimbus.authorizer"] == rangerClass):
+        putStormSiteProperty('nimbus.authorizer', nonRangerClass)
+    else:
+      putStormSiteAttributes('nimbus.authorizer', 'delete', 'true')
+
   def recommendKnoxConfigurations(self, configurations, clusterData, services, hosts):
     if "ranger-env" in services["configurations"] and "ranger-knox-plugin-properties" in services["configurations"] and \
         "ranger-knox-plugin-enabled" in services["configurations"]["ranger-env"]["properties"]:
@@ -745,6 +804,29 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       rangerEnvKnoxPluginProperty = services["configurations"]["ranger-env"]["properties"]["ranger-knox-plugin-enabled"]
       putKnoxRangerPluginProperty("ranger-knox-plugin-enabled", rangerEnvKnoxPluginProperty)
 
+    if 'topology' in services["configurations"] and 'content' in services["configurations"]["topology"]["properties"]:
+      putKnoxTopologyContent = self.putProperty(configurations, "topology", services)
+      rangerPluginEnabled = ''
+      if 'ranger-knox-plugin-properties' in configurations and 'ranger-knox-plugin-enabled' in  configurations['ranger-knox-plugin-properties']['properties']:
+        rangerPluginEnabled = configurations['ranger-knox-plugin-properties']['properties']['ranger-knox-plugin-enabled']
+      elif 'ranger-knox-plugin-properties' in services['configurations'] and 'ranger-knox-plugin-enabled' in services['configurations']['ranger-knox-plugin-properties']['properties']:
+        rangerPluginEnabled = services['configurations']['ranger-knox-plugin-properties']['properties']['ranger-knox-plugin-enabled']
+      topologyContent = services["configurations"]["topology"]["properties"]["content"]
+      authPattern = "<provider>\s*<role>\s*authorization\s*</role>[\s\S]*?</provider>"
+      authXml = re.search(authPattern, topologyContent)
+
+      if authXml.group(0):
+        authNamePattern = "<name>\s*(.*?)\s*</name>"
+        authName = re.search(authNamePattern, authXml.group(0))
+        newAuthName=''
+        if authName.group(1) == 'AclsAuthz' and rangerPluginEnabled and rangerPluginEnabled.lower() == "Yes".lower():
+          newAuthName = authName.group(0).replace('AclsAuthz', 'XASecurePDPKnox')
+        elif ((not rangerPluginEnabled) or rangerPluginEnabled.lower() != "Yes".lower()) and authName.group(1) == 'XASecurePDPKnox':
+          newAuthName = authName.group(0).replace('XASecurePDPKnox', 'AclsAuthz')
+        if newAuthName:
+          newAuthxml = authXml.group(0).replace(authName.group(0), newAuthName)
+          newTopologyXmlContent = topologyContent.replace(authXml.group(0), newAuthxml)
+          putKnoxTopologyContent('content', newTopologyXmlContent)
 
 
   def getServiceConfigurationValidators(self):

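The coprocessor handling above is the core of the HBase recommendation: the Ranger coprocessor class replaces the stock AccessController when the plugin is enabled and is swapped back out when it is disabled. A condensed, standalone sketch of that merge rule using the HDP 2.2 class names from the diff (the function name is illustrative, not Ambari's):

# Sketch of the merge rule applied to hbase.coprocessor.master.classes and
# hbase.coprocessor.region.classes; not the exact Ambari code.
RANGER_COPROCESSOR = 'com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
ACCESS_CONTROLLER = 'org.apache.hadoop.hbase.security.access.AccessController'

def merge_coprocessors(current_value, ranger_enabled):
    classes = [c for c in current_value.split(',') if c]  # drop empty entries
    if ranger_enabled:
        # Ranger takes over authorization, so the stock AccessController is removed.
        classes = [c for c in classes if c != ACCESS_CONTROLLER]
        if RANGER_COPROCESSOR not in classes:
            classes.append(RANGER_COPROCESSOR)
    elif RANGER_COPROCESSOR in classes:
        # Plugin turned off: revert to the stock AccessController.
        classes.remove(RANGER_COPROCESSOR)
        if ACCESS_CONTROLLER not in classes:
            classes.append(ACCESS_CONTROLLER)
    return ','.join(classes)

# Example: merge_coprocessors('org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint', True)
# returns 'org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
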
+ 15 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml

@@ -54,4 +54,19 @@
     </description>
   </property>
 
+  <property>
+    <name>dfs.namenode.inode.attributes.provider.class</name>
+    <value>org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer</value>
+    <description>Enable ranger hdfs plugin</description>
+    <depends-on>
+      <property>
+        <type>ranger-hdfs-plugin-properties</type>
+        <name>ranger-hdfs-plugin-enabled</name>
+      </property>
+    </depends-on>
+    <value-attributes>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
 </configuration>

+ 13 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml

@@ -138,4 +138,17 @@
       These metrics would be included even if the exclude prefix omits them.
     </description>
   </property>
+  <property>
+    <name>authorizer.class.name</name>
+    <value>kafka.security.auth.SimpleAclAuthorizer</value>
+    <description>
+      Kafka authorizer class
+    </description>
+    <depends-on>
+      <property>
+        <type>ranger-kafka-plugin-properties</type>
+        <name>ranger-kafka-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
 </configuration>

+ 24 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml

@@ -31,6 +31,30 @@
     <value>true</value>
   </property>
 
+  <property>
+    <name>yarn.acl.enable</name>
+    <value>false</value>
+    <description> Are acls enabled. </description>
+    <depends-on>
+      <property>
+        <type>ranger-yarn-plugin-properties</type>
+        <name>ranger-yarn-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
+
+  <property>
+    <name>yarn.authorization-provider</name>
+    <value>org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer</value>
+    <description> Yarn authorization provider class. </description>
+    <depends-on>
+      <property>
+        <type>ranger-yarn-plugin-properties</type>
+        <name>ranger-yarn-plugin-enabled</name>
+      </property>
+    </depends-on>
+  </property>
+
   <property>
     <name>yarn.admin.acl</name>
     <value>yarn</value>

+ 89 - 6
ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py

@@ -238,14 +238,27 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     super(HDP23StackAdvisor, self).recommendHDFSConfigurations(configurations, clusterData, services, hosts)
 
     putHdfsSiteProperty = self.putProperty(configurations, "hdfs-site", services)
-    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    putHdfsSitePropertyAttribute = self.putPropertyAttribute(configurations, "hdfs-site")
+
     if ('ranger-hdfs-plugin-properties' in services['configurations']) and ('ranger-hdfs-plugin-enabled' in services['configurations']['ranger-hdfs-plugin-properties']['properties']):
-      rangerPluginEnabled = services['configurations']['ranger-hdfs-plugin-properties']['properties']['ranger-hdfs-plugin-enabled']
-      if ("RANGER" in servicesList) and (rangerPluginEnabled.lower() == 'Yes'.lower()):
+      rangerPluginEnabled = ''
+      if 'ranger-hdfs-plugin-properties' in configurations and 'ranger-hdfs-plugin-enabled' in  configurations['ranger-hdfs-plugin-properties']['properties']:
+        rangerPluginEnabled = configurations['ranger-hdfs-plugin-properties']['properties']['ranger-hdfs-plugin-enabled']
+      elif 'ranger-hdfs-plugin-properties' in services['configurations'] and 'ranger-hdfs-plugin-enabled' in services['configurations']['ranger-hdfs-plugin-properties']['properties']:
+        rangerPluginEnabled = services['configurations']['ranger-hdfs-plugin-properties']['properties']['ranger-hdfs-plugin-enabled']
+
+      if rangerPluginEnabled and (rangerPluginEnabled.lower() == 'Yes'.lower()):
         putHdfsSiteProperty("dfs.namenode.inode.attributes.provider.class",'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer')
+      else:
+        putHdfsSitePropertyAttribute('dfs.namenode.inode.attributes.provider.class', 'delete', 'true')
+    else:
+      putHdfsSitePropertyAttribute('dfs.namenode.inode.attributes.provider.class', 'delete', 'true')
 
   def recommendKAFKAConfigurations(self, configurations, clusterData, services, hosts):
+    core_site = services["configurations"]["core-site"]["properties"]
     putKafkaBrokerProperty = self.putProperty(configurations, "kafka-broker", services)
+    putKafkaLog4jProperty = self.putProperty(configurations, "kafka-log4j", services)
+    putKafkaBrokerAttributes = self.putPropertyAttribute(configurations, "kafka-broker")
 
     if "ranger-env" in services["configurations"] and "ranger-kafka-plugin-properties" in services["configurations"] and \
         "ranger-kafka-plugin-enabled" in services["configurations"]["ranger-env"]["properties"]:
@@ -253,11 +266,68 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
       rangerEnvKafkaPluginProperty = services["configurations"]["ranger-env"]["properties"]["ranger-kafka-plugin-enabled"]
       putKafkaRangerPluginProperty("ranger-kafka-plugin-enabled", rangerEnvKafkaPluginProperty)
 
-    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     if 'ranger-kafka-plugin-properties' in services['configurations'] and ('ranger-kafka-plugin-enabled' in services['configurations']['ranger-kafka-plugin-properties']['properties']):
-      rangerPluginEnabled = services['configurations']['ranger-kafka-plugin-properties']['properties']['ranger-kafka-plugin-enabled']
-      if ("RANGER" in servicesList) and (rangerPluginEnabled.lower() == "Yes".lower()):
+      kafkaLog4jRangerLines = [{
+        "name": "log4j.appender.rangerAppender",
+        "value": "org.apache.log4j.DailyRollingFileAppender"
+        },
+        {
+          "name": "log4j.appender.rangerAppender.DatePattern",
+          "value": "'.'yyyy-MM-dd-HH"
+        },
+        {
+          "name": "log4j.appender.rangerAppender.File",
+          "value": "${kafka.logs.dir}/ranger_kafka.log"
+        },
+        {
+          "name": "log4j.appender.rangerAppender.layout",
+          "value": "org.apache.log4j.PatternLayout"
+        },
+        {
+          "name": "log4j.appender.rangerAppender.layout.ConversionPattern",
+          "value": "%d{ISO8601} %p [%t] %C{6} (%F:%L) - %m%n"
+        },
+        {
+          "name": "log4j.logger.org.apache.ranger",
+          "value": "INFO, rangerAppender"
+        }]
+
+      rangerPluginEnabled=''
+      if 'ranger-kafka-plugin-properties' in configurations and 'ranger-kafka-plugin-enabled' in  configurations['ranger-kafka-plugin-properties']['properties']:
+        rangerPluginEnabled = configurations['ranger-kafka-plugin-properties']['properties']['ranger-kafka-plugin-enabled']
+      elif 'ranger-kafka-plugin-properties' in services['configurations'] and 'ranger-kafka-plugin-enabled' in services['configurations']['ranger-kafka-plugin-properties']['properties']:
+        rangerPluginEnabled = services['configurations']['ranger-kafka-plugin-properties']['properties']['ranger-kafka-plugin-enabled']
+
+      if  rangerPluginEnabled and rangerPluginEnabled.lower() == "Yes".lower():
+        # recommend authorizer.class.name
         putKafkaBrokerProperty("authorizer.class.name", 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer')
+        # change kafka-log4j when ranger plugin is installed
+
+        if 'kafka-log4j' in services['configurations'] and 'content' in services['configurations']['kafka-log4j']['properties']:
+          kafkaLog4jContent = services['configurations']['kafka-log4j']['properties']['content']
+          for item in range(len(kafkaLog4jRangerLines)):
+            if kafkaLog4jRangerLines[item]["name"] not in kafkaLog4jContent:
+              kafkaLog4jContent+= '\n' + kafkaLog4jRangerLines[item]["name"] + '=' + kafkaLog4jRangerLines[item]["value"]
+          putKafkaLog4jProperty("content",kafkaLog4jContent)
+
+
+      else:
+      # Cluster is kerberized
+        if 'hadoop.security.authentication' in core_site and core_site['hadoop.security.authentication'] == 'kerberos' and \
+          services['configurations']['kafka-broker']['properties']['authorizer.class.name'] == 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer':
+          putKafkaBrokerProperty("authorizer.class.name", 'kafka.security.auth.SimpleAclAuthorizer')
+        else:
+          putKafkaBrokerAttributes('authorizer.class.name', 'delete', 'true')
+      # Cluster with Ranger is not kerberized
+    elif ('hadoop.security.authentication' not in core_site or core_site['hadoop.security.authentication'] != 'kerberos'):
+      putKafkaBrokerAttributes('authorizer.class.name', 'delete', 'true')
+
+
 
   def recommendRangerConfigurations(self, configurations, clusterData, services, hosts):
     super(HDP23StackAdvisor, self).recommendRangerConfigurations(configurations, clusterData, services, hosts)
@@ -370,11 +440,24 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
 
   def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
     super(HDP23StackAdvisor, self).recommendYARNConfigurations(configurations, clusterData, services, hosts)
+    putYarnSiteProperty = self.putProperty(configurations, "yarn-site", services)
+    putYarnSitePropertyAttributes = self.putPropertyAttribute(configurations, "yarn-site")
     if "ranger-env" in services["configurations"] and "ranger-yarn-plugin-properties" in services["configurations"] and \
         "ranger-yarn-plugin-enabled" in services["configurations"]["ranger-env"]["properties"]:
       putYarnRangerPluginProperty = self.putProperty(configurations, "ranger-yarn-plugin-properties", services)
       rangerEnvYarnPluginProperty = services["configurations"]["ranger-env"]["properties"]["ranger-yarn-plugin-enabled"]
       putYarnRangerPluginProperty("ranger-yarn-plugin-enabled", rangerEnvYarnPluginProperty)
+    rangerPluginEnabled = ''
+    if 'ranger-yarn-plugin-properties' in configurations and 'ranger-yarn-plugin-enabled' in  configurations['ranger-yarn-plugin-properties']['properties']:
+      rangerPluginEnabled = configurations['ranger-yarn-plugin-properties']['properties']['ranger-yarn-plugin-enabled']
+    elif 'ranger-yarn-plugin-properties' in services['configurations'] and 'ranger-yarn-plugin-enabled' in services['configurations']['ranger-yarn-plugin-properties']['properties']:
+      rangerPluginEnabled = services['configurations']['ranger-yarn-plugin-properties']['properties']['ranger-yarn-plugin-enabled']
+
+    if rangerPluginEnabled and (rangerPluginEnabled.lower() == 'Yes'.lower()):
+      putYarnSiteProperty('yarn.acl.enable','true')
+      putYarnSiteProperty('yarn.authorization-provider','org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer')
+    else:
+      putYarnSitePropertyAttributes('yarn.authorization-provider', 'delete', 'true')
 
   def getServiceConfigurationValidators(self):
       parentValidators = super(HDP23StackAdvisor, self).getServiceConfigurationValidators()

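Of the HDP 2.3 recommendations above, the Kafka case is the most involved: it combines the plugin flag with hadoop.security.authentication and either recommends the Ranger authorizer, falls back to Kafka's SimpleAclAuthorizer, or marks the property for deletion. A condensed decision sketch, assuming the plugin properties are present and using illustrative helper names rather than the advisor's putProperty/putPropertyAttribute closures:

# Sketch of the authorizer.class.name decision; not the exact Ambari code.
RANGER_AUTHORIZER = 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer'
SIMPLE_AUTHORIZER = 'kafka.security.auth.SimpleAclAuthorizer'

def recommend_kafka_authorizer(plugin_enabled, kerberized, current_value,
                               put_property, put_attribute):
    if plugin_enabled and plugin_enabled.lower() == 'yes':
        put_property('authorizer.class.name', RANGER_AUTHORIZER)
    elif kerberized and current_value == RANGER_AUTHORIZER:
        # Plugin switched off on a kerberized cluster: fall back to the stock ACL authorizer.
        put_property('authorizer.class.name', SIMPLE_AUTHORIZER)
    else:
        # Ranger is off and no Ranger authorizer needs reverting: drop the derived property.
        put_attribute('authorizer.class.name', 'delete', 'true')
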
+ 120 - 8
ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py

@@ -2271,6 +2271,9 @@ class TestHDP22StackAdvisor(TestCase):
     services = {
       "services" : [
       ],
+      "Versions": {
+        "stack_version": "2.2"
+      },
       "configurations": {
         "hbase-env": {
           "properties": {
@@ -2285,7 +2288,13 @@ class TestHDP22StackAdvisor(TestCase):
             "hbase.bucketcache.ioengine": "",
             "hbase.bucketcache.size": "",
             "hbase.bucketcache.percentage.in.combinedcache": "",
-            "hbase.coprocessor.regionserver.classes": ""
+            "hbase.coprocessor.regionserver.classes": "",
+            "hbase.coprocessor.region.classes": ""
+          }
+        },
+        "ranger-hbase-plugin-properties": {
+          "properties": {
+            "ranger-hbase-plugin-enabled" : "No"
           }
         }
       }
@@ -2331,7 +2340,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # Test when phoenix_sql_enabled = true
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test when Phoenix sql is enabled")
 
     # Test when phoenix_sql_enabled = false
     services['configurations']['hbase-env']['properties']['phoenix_sql_enabled'] = 'false'
@@ -2340,7 +2349,7 @@ class TestHDP22StackAdvisor(TestCase):
     expected['hbase-site']['property_attributes']['hbase.coprocessor.regionserver.classes'] = {'delete': 'true'}
     expected['hbase-site']['property_attributes']['phoenix.functions.allowUserDefinedFunctions'] = {'delete': 'true'}
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test when Phoenix sql is disabled")
 
     # Test hbase_master_heapsize maximum
     hosts['items'][0]['Hosts']['host_name'] = 'host1'
@@ -2375,27 +2384,53 @@ class TestHDP22StackAdvisor(TestCase):
     expected['hbase-site']['property_attributes']['phoenix.functions.allowUserDefinedFunctions'] = {'delete': 'true'}
     expected['hbase-env']['property_attributes']['hbase_master_heapsize'] = {'maximum': '49152'}
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, hosts)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test with Phoenix disabled")
 
     # Test when hbase.security.authentication = kerberos
     services['configurations']['hbase-site']['properties']['hbase.security.authentication'] = 'kerberos'
     expected['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint'
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test with Kerberos enabled")
 
     # Test when hbase.security.authentication = simple
     services['configurations']['hbase-site']['properties']['hbase.security.authentication'] = 'simple'
     expected['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint'
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test with Kerberos disabled")
+
+    # Test when Ranger plugin HBase is enabled in non-kerberos environment
+    configurations['hbase-site']['properties'].pop('hbase.coprocessor.region.classes', None)
+    configurations['hbase-site']['properties'].pop('hbase.coprocessor.master.classes', None)
+    configurations['hbase-site']['properties'].pop('hbase.coprocessor.regionserver.classes', None)
+    services['configurations']['ranger-hbase-plugin-properties']['properties']['ranger-hbase-plugin-enabled'] = 'Yes'
+    services['configurations']['hbase-site']['properties']['hbase.security.authentication'] = 'simple'
+    services['configurations']['hbase-site']['properties']['hbase.security.authorization'] = 'false'
+    services['configurations']['hbase-site']['properties']['hbase.coprocessor.region.classes'] = ''
+    services['configurations']['hbase-site']['properties']['hbase.coprocessor.master.classes'] = ''
+
+    expected['hbase-site']['properties']['hbase.security.authorization'] = "true"
+    expected['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
+    expected['hbase-site']['properties']['hbase.coprocessor.master.classes'] = 'com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
+    expected['hbase-site']['properties']['hbase.coprocessor.regionserver.classes'] = 'org.apache.hadoop.hbase.security.access.AccessController'
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected, "Test when Ranger plugin HBase is enabled in non-kerberos environment")
 
     # Test when hbase.security.authentication = kerberos AND class already there
     configurations['hbase-site']['properties'].pop('hbase.coprocessor.region.classes', None)
+    configurations['hbase-site']['properties'].pop('hbase.coprocessor.master.classes', None)
+    configurations['hbase-site']['properties'].pop('hbase.coprocessor.regionserver.classes', None)
+    configurations['hbase-site']['properties'].pop('hbase.security.authorization', None)
+    services['configurations']['ranger-hbase-plugin-properties']['properties']['ranger-hbase-plugin-enabled'] = 'No'
+    services['configurations']['hbase-site']['properties']['hbase.security.authorization'] = 'false'
     services['configurations']['hbase-site']['properties']['hbase.security.authentication'] = 'kerberos'
+    services['configurations']['hbase-site']['properties']['hbase.coprocessor.master.classes'] = ''
     services['configurations']['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'a.b.c.d'
     expected['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'a.b.c.d,org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint'
+    expected['hbase-site']['properties']['hbase.coprocessor.master.classes'] = ''
+    del expected['hbase-site']['properties']['hbase.security.authorization']
+    del expected['hbase-site']['properties']['hbase.coprocessor.regionserver.classes']
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test with Kerberos enabled and hbase.coprocessor.region.classes predefined")
 
     # Test when hbase.security.authentication = kerberos AND authorization = true
     configurations['hbase-site']['properties'].pop('hbase.coprocessor.region.classes', None)
@@ -2406,7 +2441,20 @@ class TestHDP22StackAdvisor(TestCase):
     expected['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'org.apache.hadoop.hbase.security.access.AccessController,org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint'
     expected['hbase-site']['properties']['hbase.coprocessor.regionserver.classes'] = "org.apache.hadoop.hbase.security.access.AccessController"
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
-    self.assertEquals(configurations, expected)
+    self.assertEquals(configurations, expected, "Test with Kerberos enabled and authorization is true")
+
+    # Test when Ranger plugin HBase is enabled in kerberos environment
+    configurations['hbase-site']['properties'].pop('hbase.coprocessor.region.classes', None)
+    services['configurations']['hbase-site']['properties']['hbase.coprocessor.region.classes'] = ''
+    services['configurations']['hbase-site']['properties']['hbase.coprocessor.master.classes'] = ''
+    services['configurations']['hbase-site']['properties']['hbase.security.authentication'] = 'kerberos'
+    services['configurations']['hbase-site']['properties']['hbase.security.authorization'] = 'false'
+    services['configurations']['ranger-hbase-plugin-properties']['properties']['ranger-hbase-plugin-enabled'] = 'Yes'
+    expected['hbase-site']['properties']['hbase.security.authorization']  = 'true'
+    expected['hbase-site']['properties']['hbase.coprocessor.master.classes'] = 'com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
+    expected['hbase-site']['properties']['hbase.coprocessor.region.classes'] = 'org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor'
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected, "Test with Kerberos enabled and HBase ranger plugin enabled")
 
     # Test - default recommendations should have certain configs deleted. HAS TO BE LAST TEST.
     services["configurations"] = {"hbase-site": {"properties": {"phoenix.functions.allowUserDefinedFunctions": '', "hbase.rpc.controllerfactory.class": ''}}}
@@ -2417,6 +2465,70 @@ class TestHDP22StackAdvisor(TestCase):
     self.assertEquals(configurations['hbase-site']['properties']['hbase.regionserver.wal.codec'], "org.apache.hadoop.hbase.regionserver.wal.WALCellCodec")
 
 
+  def test_recommendStormConfigurations(self):
+    configurations = {}
+    clusterData = {}
+    services = {
+      "services":
+        [
+          {
+            "StackServices": {
+              "service_name" : "STORM",
+              "service_version" : "2.6.0.2.2"
+            }
+          }
+        ],
+      "Versions": {
+        "stack_version": "2.2"
+      },
+      "configurations": {
+        "core-site": {
+          "properties": { },
+        },
+        "storm-site": {
+          "properties": {
+            "nimbus.authorizer" : "backtype.storm.security.auth.authorizer.SimpleACLAuthorizer"
+          },
+          "property_attributes": {}
+        },
+        "ranger-storm-plugin-properties": {
+          "properties": {
+            "ranger-storm-plugin-enabled": "No"
+          }
+        }
+      }
+    }
+
+    # Test nimbus.authorizer with Ranger Storm plugin disabled in non-kerberos environment
+    self.stackAdvisor.recommendStormConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['storm-site']['property_attributes']['nimbus.authorizer'], {'delete': 'true'}, "Test nimbus.authorizer with Ranger Storm plugin disabled in non-kerberos environment")
+
+    # Test nimbus.authorizer with Ranger Storm plugin enabled in non-kerberos environment
+    configurations['storm-site']['properties'] = {}
+    configurations['storm-site']['property_attributes'] = {}
+    services['configurations']['ranger-storm-plugin-properties']['properties']['ranger-storm-plugin-enabled'] = 'Yes'
+    self.stackAdvisor.recommendStormConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['storm-site']['property_attributes']['nimbus.authorizer'], {'delete': 'true'}, "Test nimbus.authorizer with Ranger Storm plugin enabled in non-kerberos environment")
+
+    # Test nimbus.authorizer with Ranger Storm plugin being enabled in kerberos environment
+    configurations['storm-site']['properties'] = {}
+    configurations['storm-site']['property_attributes'] = {}
+    services['configurations']['storm-site']['properties']['nimbus.authorizer'] = ''
+    services['configurations']['ranger-storm-plugin-properties']['properties']['ranger-storm-plugin-enabled'] = 'Yes'
+    services['configurations']['core-site']['properties']['hadoop.security.authentication'] = 'kerberos'
+    self.stackAdvisor.recommendStormConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['storm-site']['properties']['nimbus.authorizer'], 'com.xasecure.authorization.storm.authorizer.XaSecureStormAuthorizer', "Test nimbus.authorizer with Ranger Storm plugin enabled in kerberos environment")
+
+    # Test nimbus.authorizer with Ranger Storm plugin being disabled in kerberos environment
+    configurations['storm-site']['properties'] = {}
+    configurations['storm-site']['property_attributes'] = {}
+    services['configurations']['ranger-storm-plugin-properties']['properties']['ranger-storm-plugin-enabled'] = 'No'
+    services['configurations']['core-site']['properties']['hadoop.security.authentication'] = 'kerberos'
+    services['configurations']['storm-site']['properties']['nimbus.authorizer'] = 'com.xasecure.authorization.storm.authorizer.XaSecureStormAuthorizer'
+    self.stackAdvisor.recommendStormConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['storm-site']['properties']['nimbus.authorizer'], 'backtype.storm.security.auth.authorizer.SimpleACLAuthorizer', "Test nimbus.authorizer with Ranger Storm plugin being disabled in kerberos environment")
+
+
   def test_recommendHDFSConfigurations(self):
     configurations = {
       'ranger-hdfs-plugin-properties':{

+ 204 - 0
ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py

@@ -71,6 +71,207 @@ class TestHDP23StackAdvisor(TestCase):
     open_mock.return_value = MagicFile()
     return self.get_system_min_uid_real()
 
+  def test_recommendHDFSConfigurations(self):
+    configurations = {}
+    clusterData = {
+      "totalAvailableRam": 2048,
+      "hBaseInstalled": True,
+      "hbaseRam": 112,
+      "reservedRam": 128
+    }
+    services = {
+      "services":
+        [
+          {
+            "StackServices": {
+              "service_name" : "HDFS",
+              "service_version" : "2.6.0.2.2"
+            }
+          }
+        ],
+      "Versions": {
+        "stack_version": "2.3"
+      },
+      "configurations": {
+        "hdfs-site": {
+          "properties": {
+            "dfs.namenode.inode.attributes.provider.class": "org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer"
+          }
+        },
+        "ranger-hdfs-plugin-properties": {
+          "properties": {
+            "ranger-hdfs-plugin-enabled": "No"
+          }
+        }
+      }
+    }
+
+    # Test with Ranger HDFS plugin disabled
+    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['hdfs-site']['property_attributes']['dfs.namenode.inode.attributes.provider.class'], {'delete': 'true'}, "Test with Ranger HDFS plugin is disabled")
+
+    # Test with Ranger HDFS plugin is enabled
+    configurations['hdfs-site']['properties'] = {}
+    configurations['hdfs-site']['property_attributes'] = {}
+    services['configurations']['ranger-hdfs-plugin-properties']['properties']['ranger-hdfs-plugin-enabled'] = 'Yes'
+    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['hdfs-site']['properties']['dfs.namenode.inode.attributes.provider.class'], 'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer', "Test with Ranger HDFS plugin is enabled")
+
+  def test_recommendYARNConfigurations(self):
+    configurations = {}
+    servicesList = ["YARN"]
+    components = []
+    hosts = {
+      "items" : [
+        {
+          "Hosts" : {
+            "cpu_count" : 6,
+            "total_mem" : 50331648,
+            "disk_info" : [
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/vagrant"},
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/vagrant"}
+            ],
+            "public_host_name" : "c6401.ambari.apache.org",
+            "host_name" : "c6401.ambari.apache.org"
+          }
+        }
+      ]
+    }
+    services = {
+      "services" : [ {
+        "StackServices":{
+          "service_name": "YARN",
+        },
+        "Versions": {
+          "stack_version": "2.3"
+        },
+        "components": [
+          {
+            "StackServiceComponents": {
+              "component_name": "NODEMANAGER",
+              "hostnames": ["c6401.ambari.apache.org"]
+            }
+          }
+        ]
+      }
+      ],
+      "configurations": {
+        "yarn-site": {
+          "properties": {
+            "yarn.authorization-provider": "org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer"
+          }
+        },
+        "ranger-yarn-plugin-properties": {
+          "properties": {
+            "ranger-yarn-plugin-enabled": "No"
+          }
+        }
+      }
+    }
+
+    clusterData = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, None)
+    # Test with Ranger YARN plugin disabled
+    self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['yarn-site']['property_attributes']['yarn.authorization-provider'], {'delete': 'true'}, "Test with Ranger YARN plugin disabled")
+
+    # Test with Ranger YARN plugin is enabled
+    configurations['yarn-site']['properties'] = {}
+    configurations['yarn-site']['property_attributes'] = {}
+    services['configurations']['ranger-yarn-plugin-properties']['properties']['ranger-yarn-plugin-enabled'] = 'Yes'
+    self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['yarn-site']['properties']['yarn.authorization-provider'], 'org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer', "Test with Ranger YARN plugin enabled")
+
+
+  def test_recommendKAFKAConfigurations(self):
+    configurations = {}
+    clusterData = {
+      "totalAvailableRam": 2048,
+      "hBaseInstalled": True,
+      "hbaseRam": 112,
+      "reservedRam": 128
+    }
+    services = {
+      "services":
+        [
+          {
+            "StackServices": {
+              "service_name" : "KAFKA",
+              "service_version" : "2.6.0.2.2"
+            }
+          }
+        ],
+      "Versions": {
+        "stack_version": "2.3"
+      },
+      "configurations": {
+        "core-site": {
+          "properties": { },
+        },
+        "kafka-broker": {
+          "properties": {
+            "authorizer.class.name" : "kafka.security.auth.SimpleAclAuthorizer"
+          },
+          "property_attributes": {}
+        },
+        "ranger-kafka-plugin-properties": {
+          "properties": {
+            "ranger-kafka-plugin-enabled": "No"
+          }
+        },
+        "kafka-log4j": {
+          "properties": {
+            "content": "kafka.logs.dir=logs"
+          }
+        }
+      }
+    }
+
+    # Test authorizer.class.name with Ranger Kafka plugin disabled in non-kerberos environment
+    self.stackAdvisor.recommendKAFKAConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['kafka-broker']['property_attributes']['authorizer.class.name'], {'delete': 'true'}, "Test authorizer.class.name with Ranger Kafka plugin is disabled in non-kerberos environment")
+
+    # Test authorizer.class.name with Ranger Kafka plugin disabled in kerberos environment
+    configurations['kafka-broker']['properties'] = {}
+    configurations['kafka-broker']['property_attributes'] = {}
+    services['configurations']['core-site']['properties']['hadoop.security.authentication'] = 'kerberos'
+    services['configurations']['kafka-broker']['properties']['authorizer.class.name'] = 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer'
+    self.stackAdvisor.recommendKAFKAConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['kafka-broker']['properties']['authorizer.class.name'], 'kafka.security.auth.SimpleAclAuthorizer' , "Test authorizer.class.name with Ranger Kafka plugin disabled in kerberos environment")
+
+    # Test authorizer.class.name with Ranger Kafka plugin enabled in non-kerberos environment
+    configurations['kafka-broker']['properties'] = {}
+    configurations['kafka-broker']['property_attributes'] = {}
+    del services['configurations']['core-site']['properties']['hadoop.security.authentication']
+    services['configurations']['kafka-broker']['properties']['authorizer.class.name'] = 'kafka.security.auth.SimpleAclAuthorizer'
+    services['configurations']['ranger-kafka-plugin-properties']['properties']['ranger-kafka-plugin-enabled'] = 'Yes'
+    self.stackAdvisor.recommendKAFKAConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['kafka-broker']['properties']['authorizer.class.name'], 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer', "Test authorizer.class.name with Ranger Kafka plugin enabled in non-kerberos environment")
+
+    # Test authorizer.class.name with Ranger Kafka plugin enabled in kerberos environment
+    configurations['kafka-broker']['properties'] = {}
+    configurations['kafka-broker']['property_attributes'] = {}
+    services['configurations']['core-site']['properties']['hadoop.security.authentication'] = 'kerberos'
+    services['configurations']['kafka-broker']['properties']['authorizer.class.name'] = 'kafka.security.auth.SimpleAclAuthorizer'
+    services['configurations']['ranger-kafka-plugin-properties']['properties']['ranger-kafka-plugin-enabled'] = 'Yes'
+    self.stackAdvisor.recommendKAFKAConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations['kafka-broker']['properties']['authorizer.class.name'], 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer', "Test authorizer.class.name with Ranger Kafka plugin enabled in kerberos environment")
+
+    # Test kafka-log4j content when Ranger plugin for Kafka is enabled
+
+    self.stackAdvisor.recommendKAFKAConfigurations(configurations, clusterData, services, None)
+    log4jContent = services['configurations']['kafka-log4j']['properties']['content']
+    newRangerLog4content = "\nlog4j.appender.rangerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.rangerAppender.DatePattern='.'yyyy-MM-dd-HH\n" \
+                     "log4j.appender.rangerAppender.File=${kafka.logs.dir}/ranger_kafka.log\nlog4j.appender.rangerAppender.layout" \
+                     "=org.apache.log4j.PatternLayout\nlog4j.appender.rangerAppender.layout.ConversionPattern=%d{ISO8601} %p [%t] %C{6} (%F:%L) - %m%n\n" \
+                     "log4j.logger.org.apache.ranger=INFO, rangerAppender"
+    expectedLog4jContent = log4jContent + newRangerLog4content
+    self.assertEquals(configurations['kafka-log4j']['properties']['content'], expectedLog4jContent, "Test kafka-log4j content when Ranger plugin for Kafka is enabled")
+
+
   def test_recommendHBASEConfigurations(self):
     configurations = {}
     clusterData = {
@@ -201,6 +402,9 @@ class TestHDP23StackAdvisor(TestCase):
             },
             ],
           }],
+      "Versions": {
+        "stack_version": "2.3"
+      },
       "configurations": {
         "yarn-site": {
           "properties": {

+ 0 - 107
ambari-web/app/utils/configs/modification_handlers/hbase.js

@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('utils/configs/modification_handlers/modification_handler');
-
-module.exports = App.ServiceConfigModificationHandler.create({
-  serviceId : 'HBASE',
-
-  updateConfigClasses : function(configClasses, authEnabled, affectedProperties, addOldValue) {
-    if (configClasses != null) {
-      var xaAuthCoProcessorClass = App.get('isHadoop23Stack') ? "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"
-        : "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor";
-      var nonXAClass = 'org.apache.hadoop.hbase.security.access.AccessController';
-      var currentClassesList = configClasses.get('value').trim().length > 0 ? configClasses.get('value').trim().split(',') : [];
-      var newClassesList = null, xaClassIndex, nonXaClassIndex;
-
-      if (authEnabled) {
-        var nonXaClassIndex = currentClassesList.indexOf(nonXAClass);
-        if (nonXaClassIndex > -1) {
-          currentClassesList.splice(nonXaClassIndex, 1);
-          newClassesList = currentClassesList;
-        }
-        var xaClassIndex = currentClassesList.indexOf(xaAuthCoProcessorClass);
-        if (xaClassIndex < 0) {
-          currentClassesList.push(xaAuthCoProcessorClass);
-          newClassesList = currentClassesList;
-        }
-      } else {
-        var xaClassIndex = currentClassesList.indexOf(xaAuthCoProcessorClass);
-        if (xaClassIndex > -1) {
-          currentClassesList.splice(xaClassIndex, 1);
-          newClassesList = currentClassesList;
-        }
-        if (addOldValue) {
-          var nonXaClassIndex = currentClassesList.indexOf(nonXAClass);
-          if (nonXaClassIndex < 0) {
-            currentClassesList.push(nonXAClass);
-            newClassesList = currentClassesList;
-          }
-        }
-      }
-
-      if (newClassesList != null) {
-        affectedProperties.push({
-          serviceName : "HBASE",
-          sourceServiceName : "HBASE",
-          propertyName : configClasses.get('name'),
-          propertyDisplayName : configClasses.get('name'),
-          newValue : newClassesList.join(','),
-          curValue : configClasses.get('value'),
-          changedPropertyName : 'ranger-hbase-plugin-enabled',
-          removed : false,
-          filename : 'hbase-site.xml'
-        });
-      }
-    }
-  },
-
-  getDependentConfigChanges : function(changedConfig, selectedServices, allConfigs, securityEnabled) {
-    var affectedProperties = [];
-    var newValue = changedConfig.get("value");
-    var hbaseAuthEnabledPropertyName = "ranger-hbase-plugin-enabled";
-    var affectedPropertyName = changedConfig.get("name");
-    if (affectedPropertyName == hbaseAuthEnabledPropertyName) {
-      var configAuthEnabled = this.getConfig(allConfigs, 'hbase.security.authorization', 'hbase-site.xml', 'HBASE');
-      var configMasterClasses = this.getConfig(allConfigs, 'hbase.coprocessor.master.classes', 'hbase-site.xml', 'HBASE');
-      var configRegionClasses = this.getConfig(allConfigs, 'hbase.coprocessor.region.classes', 'hbase-site.xml', 'HBASE');
-
-      var authEnabled = newValue == "Yes";
-      var newAuthEnabledValue = authEnabled ? "true" : "false";
-      var newRpcProtectionValue = authEnabled ? "privacy" : "authentication";
-
-      // Add HBase-Ranger configs
-      this.updateConfigClasses(configMasterClasses, authEnabled, affectedProperties, configAuthEnabled.get('value') == 'true');
-      this.updateConfigClasses(configRegionClasses, authEnabled, affectedProperties, configAuthEnabled.get('value') == 'true');
-      if (authEnabled && newAuthEnabledValue !== configAuthEnabled.get('value')) {
-        affectedProperties.push({
-          serviceName : "HBASE",
-          sourceServiceName : "HBASE",
-          propertyName : 'hbase.security.authorization',
-          propertyDisplayName : 'hbase.security.authorization',
-          newValue : newAuthEnabledValue,
-          curValue : configAuthEnabled.get('value'),
-          changedPropertyName : hbaseAuthEnabledPropertyName,
-          removed : false,
-          filename : 'hbase-site.xml'
-        });
-      }
-    }
-    return affectedProperties;
-  }
-});
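
With the HBASE modification handler above removed, the equivalent derivation is meant to come from the stack advisor whenever ranger-hbase-plugin-enabled changes. The sketch below is illustrative only and is not the stack_advisor.py code from this change: the function name and the treatment of services/configurations as plain dicts (current values in, recommendations out) are assumptions, while the class names and the property it rewrites are taken from the handler above.

# Illustrative sketch, not the stack_advisor.py change from this commit.
RANGER_HBASE_COPROCESSOR = "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"
STOCK_ACCESS_CONTROLLER = "org.apache.hadoop.hbase.security.access.AccessController"

def recommend_hbase_ranger_configs(services, configurations):
    # "Yes"/"No" flag, read the same way the removed handler read the UI value.
    ranger_props = services.get("configurations", {}) \
                           .get("ranger-hbase-plugin-properties", {}) \
                           .get("properties", {})
    plugin_enabled = ranger_props.get("ranger-hbase-plugin-enabled", "No") == "Yes"

    current = services.get("configurations", {}).get("hbase-site", {}) \
                      .get("properties", {}).get("hbase.coprocessor.master.classes", "")
    classes = [c.strip() for c in current.split(",") if c.strip()]

    if plugin_enabled:
        # Drop the stock AccessController, add the Ranger coprocessor and force
        # authorization on: the same edits the removed handler pushed from the UI.
        classes = [c for c in classes if c != STOCK_ACCESS_CONTROLLER]
        if RANGER_HBASE_COPROCESSOR not in classes:
            classes.append(RANGER_HBASE_COPROCESSOR)
    else:
        classes = [c for c in classes if c != RANGER_HBASE_COPROCESSOR]

    hbase_site = configurations.setdefault("hbase-site", {}).setdefault("properties", {})
    hbase_site["hbase.coprocessor.master.classes"] = ",".join(classes)
    if plugin_enabled:
        hbase_site["hbase.security.authorization"] = "true"

Keeping the list manipulation idempotent means the recommendation can be re-applied safely each time the flag flips, which the one-shot UI handler did not have to worry about.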

+ 0 - 55
ambari-web/app/utils/configs/modification_handlers/hdfs.js

@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('utils/configs/modification_handlers/modification_handler');
-
-module.exports = App.ServiceConfigModificationHandler.create({
-  serviceId : 'HDFS',
-
-  getDependentConfigChanges : function(changedConfig, selectedServices, allConfigs, securityEnabled) {
-    var affectedProperties = [];
-    var newValue = changedConfig.get("value");
-    var rangerPluginEnabledName = "ranger-hdfs-plugin-enabled";
-    var affectedPropertyName = changedConfig.get("name");
-    if (App.get('isHadoop23Stack') && affectedPropertyName == rangerPluginEnabledName) {
-      var configAttributesProviderClass = this.getConfig(allConfigs, 'dfs.namenode.inode.attributes.provider.class', 'hdfs-site.xml', 'HDFS');
-      var isAttributesProviderClassSet = typeof configAttributesProviderClass !== 'undefined';
-
-      var rangerPluginEnabled = newValue == "Yes";
-      var newDfsPermissionsEnabled = rangerPluginEnabled ? "true" : "false";
-      var newAttributesProviderClass = 'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer';
-
-      if (rangerPluginEnabled && (!isAttributesProviderClassSet || newAttributesProviderClass != configAttributesProviderClass.get('value'))) {
-        affectedProperties.push({
-          serviceName : "HDFS",
-          sourceServiceName : "HDFS",
-          propertyName : 'dfs.namenode.inode.attributes.provider.class',
-          propertyDisplayName : 'dfs.namenode.inode.attributes.provider.class',
-          newValue : newAttributesProviderClass,
-          curValue : isAttributesProviderClassSet ? configAttributesProviderClass.get('value') : '',
-          changedPropertyName : rangerPluginEnabledName,
-          removed : false,
-          isNewProperty : !isAttributesProviderClassSet,
-          filename : 'hdfs-site.xml',
-          categoryName: 'Custom hdfs-site'
-        });
-      }
-    }
-    return affectedProperties;
-  }
-});
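
The removed HDFS handler derived a single property, and only on HDP 2.3 or later. A minimal sketch of the same rule, assuming a hypothetical recommend_hdfs_ranger_configs helper and a caller-supplied is_hdp_23_or_later flag; only the authorizer class name and property name come from the handler above.

# Illustrative sketch; helper name and is_hdp_23_or_later flag are assumptions.
RANGER_HDFS_AUTHORIZER = "org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer"

def recommend_hdfs_ranger_configs(services, configurations, is_hdp_23_or_later):
    ranger_props = services.get("configurations", {}) \
                           .get("ranger-hdfs-plugin-properties", {}) \
                           .get("properties", {})
    if is_hdp_23_or_later and ranger_props.get("ranger-hdfs-plugin-enabled", "No") == "Yes":
        hdfs_site = configurations.setdefault("hdfs-site", {}).setdefault("properties", {})
        # The removed handler added this property only on HDP 2.3+ stacks.
        hdfs_site["dfs.namenode.inode.attributes.provider.class"] = RANGER_HDFS_AUTHORIZER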

+ 0 - 71
ambari-web/app/utils/configs/modification_handlers/kafka.js

@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('utils/configs/modification_handlers/modification_handler');
-
-module.exports = App.ServiceConfigModificationHandler.create({
-  serviceId: 'KAFKA',
-
-  getDependentConfigChanges: function (changedConfig, selectedServices, allConfigs) {
-    var rangerPluginEnabledName = "ranger-kafka-plugin-enabled";
-    var affectedProperties = [];
-    var affectedPropertyName = changedConfig.get("name");
-    var authorizerClassName, kafkaLog4jContent, newLog4jContentValue;
-    var isEnabling = changedConfig.get('value') === 'Yes';
-
-    if (affectedPropertyName === rangerPluginEnabledName) {
-      authorizerClassName = this.getConfig(allConfigs, 'authorizer.class.name', 'kafka-broker.xml', 'KAFKA');
-      kafkaLog4jContent = this.getConfig(allConfigs, 'content', 'kafka-log4j.xml', 'KAFKA');
-      newLog4jContentValue = kafkaLog4jContent.get('value');
-      newLog4jContentValue += "\n\nlog4j.appender.rangerAppender=org.apache.log4j.DailyRollingFileAppender\n" +
-      "log4j.appender.rangerAppender.DatePattern='.'yyyy-MM-dd-HH\n" +
-      "log4j.appender.rangerAppender.File=${kafka.logs.dir}/ranger_kafka.log\n" +
-      "log4j.appender.rangerAppender.layout=org.apache.log4j.PatternLayout\n" +
-      "log4j.appender.rangerAppender.layout.ConversionPattern=%d{ISO8601} %p [%t] %C{6} (%F:%L) - %m%n\n" +
-      "log4j.logger.org.apache.ranger=INFO, rangerAppender";
-
-      affectedProperties = [
-        {
-          serviceName: "KAFKA",
-          sourceServiceName: "KAFKA",
-          propertyName: 'authorizer.class.name',
-          propertyDisplayName: 'authorizer.class.name',
-          newValue: isEnabling ? 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer' :
-              App.StackConfigProperty.find().findProperty('name', 'authorizer.class.name').get('value'),
-          curValue: authorizerClassName.get('value'),
-          changedPropertyName: rangerPluginEnabledName,
-          removed: false,
-          filename: 'kafka-broker.xml'
-        },
-        {
-          serviceName: "KAFKA",
-          sourceServiceName: "KAFKA",
-          propertyName: 'content',
-          propertyDisplayName: 'content',
-          newValue: isEnabling ? newLog4jContentValue : App.StackConfigProperty.find().filterProperty('filename', 'kafka-log4j.xml').findProperty('name', 'content').get('value'),
-          curValue: kafkaLog4jContent.get('value'),
-          changedPropertyName: rangerPluginEnabledName,
-          removed: false,
-          filename: 'kafka-log4j.xml'
-        }
-      ];
-    }
-
-    return affectedProperties;
-  }
-});
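
The KAFKA handler above derived two things: the authorizer class and a ranger appender appended to the kafka-log4j template. Below is a hedged sketch of the enabling direction only (the reset to stack defaults on disable is omitted); the helper name and dict plumbing are assumptions, while the appender block and class name are copied from the handler.

# Illustrative sketch of the enabling direction only; helper name and dict
# plumbing are assumptions, the appender block mirrors the removed handler.
RANGER_KAFKA_AUTHORIZER = "org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer"
RANGER_KAFKA_LOG4J = "\n".join([
    "log4j.appender.rangerAppender=org.apache.log4j.DailyRollingFileAppender",
    "log4j.appender.rangerAppender.DatePattern='.'yyyy-MM-dd-HH",
    "log4j.appender.rangerAppender.File=${kafka.logs.dir}/ranger_kafka.log",
    "log4j.appender.rangerAppender.layout=org.apache.log4j.PatternLayout",
    "log4j.appender.rangerAppender.layout.ConversionPattern=%d{ISO8601} %p [%t] %C{6} (%F:%L) - %m%n",
    "log4j.logger.org.apache.ranger=INFO, rangerAppender",
])

def recommend_kafka_ranger_configs(services, configurations):
    ranger_props = services.get("configurations", {}) \
                           .get("ranger-kafka-plugin-properties", {}) \
                           .get("properties", {})
    if ranger_props.get("ranger-kafka-plugin-enabled", "No") != "Yes":
        return

    broker = configurations.setdefault("kafka-broker", {}).setdefault("properties", {})
    broker["authorizer.class.name"] = RANGER_KAFKA_AUTHORIZER

    content = services.get("configurations", {}).get("kafka-log4j", {}) \
                      .get("properties", {}).get("content", "")
    if "log4j.appender.rangerAppender" not in content:
        # Append the appender block once, as the removed handler appended it
        # to the kafka-log4j template content.
        log4j = configurations.setdefault("kafka-log4j", {}).setdefault("properties", {})
        log4j["content"] = content + "\n\n" + RANGER_KAFKA_LOG4J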

+ 0 - 67
ambari-web/app/utils/configs/modification_handlers/knox.js

@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('utils/configs/modification_handlers/modification_handler');
-
-module.exports = App.ServiceConfigModificationHandler.create({
-  serviceId : 'KNOX',
-
-  getDependentConfigChanges : function(changedConfig, selectedServices, allConfigs, securityEnabled) {
-    var affectedProperties = [];
-    var newValue = changedConfig.get("value");
-    var rangerPluginEnablePropertyName = "ranger-knox-plugin-enabled";
-    var affectedPropertyName = changedConfig.get("name");
-    if (affectedPropertyName == rangerPluginEnablePropertyName) {
-      var topologyXmlContent = this.getConfig(allConfigs, 'content', 'topology.xml', 'KNOX');
-      if (topologyXmlContent != null) {
-        var topologyXmlContentString = topologyXmlContent.get('value');
-        var newTopologyXmlContentString = null;
-        var authEnabled = newValue == "Yes";
-        var authXml = /<provider>[\s]*<role>[\s]*authorization[\s]*<\/role>[\s\S]*?<\/provider>/.exec(topologyXmlContentString);
-        if (authXml != null && authXml.length > 0) {
-          var nameArray = /<name>\s*(.*?)\s*<\/name>/.exec(authXml[0]);
-          if (nameArray != null && nameArray.length > 1) {
-            if (authEnabled && 'AclsAuthz' == nameArray[1]) {
-              var newName = nameArray[0].replace('AclsAuthz', 'XASecurePDPKnox');
-              var newAuthXml = authXml[0].replace(nameArray[0], newName);
-              newTopologyXmlContentString = topologyXmlContentString.replace(authXml[0], newAuthXml);
-            } else if (!authEnabled && 'XASecurePDPKnox' == nameArray[1]) {
-              var newName = nameArray[0].replace('XASecurePDPKnox', 'AclsAuthz');
-              var newAuthXml = authXml[0].replace(nameArray[0], newName);
-              newTopologyXmlContentString = topologyXmlContentString.replace(authXml[0], newAuthXml);
-            }
-          }
-        }
-        if (newTopologyXmlContentString != null) {
-          affectedProperties.push({
-            serviceName : "KNOX",
-            sourceServiceName : "KNOX",
-            propertyName : 'content',
-            propertyDisplayName : 'content',
-            newValue : newTopologyXmlContentString,
-            curValue : topologyXmlContent.get('value'),
-            changedPropertyName : rangerPluginEnablePropertyName,
-            removed : false,
-            filename : 'topology.xml'
-          });
-        }
-      }
-    }
-    return affectedProperties;
-  }
-});
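
The interesting part of the KNOX handler is the in-place rewrite of the topology template, swapping the authorization provider name between AclsAuthz and XASecurePDPKnox. Below is a standalone Python transliteration of that substitution, offered as a sketch: the function signature is an assumption, and wiring it to the topology 'content' property is left to the caller.

# Standalone transliteration of the removed handler's substitution; the
# function signature is an assumption.
import re

AUTH_PROVIDER_RE = re.compile(r"<provider>\s*<role>\s*authorization\s*</role>[\s\S]*?</provider>")
PROVIDER_NAME_RE = re.compile(r"<name>\s*(.*?)\s*</name>")

def swap_knox_authorization_provider(topology_content, plugin_enabled):
    block_match = AUTH_PROVIDER_RE.search(topology_content)
    if block_match is None:
        return topology_content
    block = block_match.group(0)
    name_match = PROVIDER_NAME_RE.search(block)
    if name_match is None:
        return topology_content

    name = name_match.group(1)
    if plugin_enabled and name == "AclsAuthz":
        new_name_tag = name_match.group(0).replace("AclsAuthz", "XASecurePDPKnox")
    elif not plugin_enabled and name == "XASecurePDPKnox":
        new_name_tag = name_match.group(0).replace("XASecurePDPKnox", "AclsAuthz")
    else:
        # Nothing to do: the provider is already in the expected state.
        return topology_content
    return topology_content.replace(block, block.replace(name_match.group(0), new_name_tag))

As in the JavaScript version, only the first authorization provider block is rewritten and unrelated topology content passes through untouched.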

+ 0 - 70
ambari-web/app/utils/configs/modification_handlers/storm.js

@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('utils/configs/modification_handlers/modification_handler');
-
-module.exports = App.ServiceConfigModificationHandler.create({
-  serviceId : 'STORM',
-
-  getDependentConfigChanges : function(changedConfig, selectedServices, allConfigs, securityEnabled) {
-    var affectedProperties = [];
-    var newValue = changedConfig.get("value");
-    var rangerPluginEnablePropertyName = "ranger-storm-plugin-enabled";
-    var affectedPropertyName = changedConfig.get("name");
-    if (affectedPropertyName == rangerPluginEnablePropertyName) {
-      var authEnabled = newValue == "Yes";
-      var configNimbusAuthorizer = this.getConfig(allConfigs, 'nimbus.authorizer', 'storm-site.xml', 'STORM');
-      if (configNimbusAuthorizer != null) {
-        // Only when configuration is already present, do we act on it.
-        // Unsecured clusters do not have this config, and hence we skip any
-        // updates
-        var newNimbusAuthorizer = authEnabled ? (App.get('isHadoop23Stack') ? "org.apache.ranger.authorization.storm.authorizer.RangerStormAuthorizer"
-              : "com.xasecure.authorization.storm.authorizer.XaSecureStormAuthorizer")
-            : "backtype.storm.security.auth.authorizer.SimpleACLAuthorizer";
-
-        // Add Storm-Ranger configs
-        if (newNimbusAuthorizer !== configNimbusAuthorizer.get('value')) {
-          affectedProperties.push({
-            serviceName : "STORM",
-            sourceServiceName : "STORM",
-            propertyName : 'nimbus.authorizer',
-            propertyDisplayName : 'nimbus.authorizer',
-            newValue : newNimbusAuthorizer,
-            curValue : configNimbusAuthorizer.get('value'),
-            changedPropertyName : rangerPluginEnablePropertyName,
-            removed : false,
-            filename : 'storm-site.xml'
-          });
-        }
-      }
-      if (authEnabled && affectedProperties.length < 1 && !securityEnabled) {
-        App.ModalPopup.show({
-          header : Em.I18n.t('services.storm.configs.range-plugin-enable.dialog.title'),
-          primary : Em.I18n.t('ok'),
-          secondary : false,
-          showCloseButton : false,
-          onPrimary : function() {
-            this.hide();
-          },
-          body : Em.I18n.t('services.storm.configs.range-plugin-enable.dialog.message')
-        });
-      }
-    }
-    return affectedProperties;
-  }
-});
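
The STORM handler only acted when nimbus.authorizer was already defined and picked the authorizer class by stack line. A sketch under the same assumptions as the earlier ones (hypothetical helper name, plain-dict services/configurations, caller-supplied stack flag); the three class names come from the handler above.

# Illustrative sketch; helper name, dict plumbing and the stack flag are
# assumptions. Class names come from the removed handler.
def recommend_storm_nimbus_authorizer(services, configurations, is_hdp_23_or_later):
    ranger_props = services.get("configurations", {}) \
                           .get("ranger-storm-plugin-properties", {}) \
                           .get("properties", {})
    plugin_enabled = ranger_props.get("ranger-storm-plugin-enabled", "No") == "Yes"

    current = services.get("configurations", {}).get("storm-site", {}) \
                      .get("properties", {}).get("nimbus.authorizer")
    if current is None:
        # Like the removed handler: act only when the property already exists,
        # since unsecured clusters do not define it.
        return

    if plugin_enabled:
        new_value = ("org.apache.ranger.authorization.storm.authorizer.RangerStormAuthorizer"
                     if is_hdp_23_or_later
                     else "com.xasecure.authorization.storm.authorizer.XaSecureStormAuthorizer")
    else:
        new_value = "backtype.storm.security.auth.authorizer.SimpleACLAuthorizer"

    if new_value != current:
        storm_site = configurations.setdefault("storm-site", {}).setdefault("properties", {})
        storm_site["nimbus.authorizer"] = new_value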

+ 0 - 71
ambari-web/app/utils/configs/modification_handlers/yarn.js

@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * 'License'); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('utils/configs/modification_handlers/modification_handler');
-
-module.exports = App.ServiceConfigModificationHandler.create({
-  serviceId: 'YARN',
-
-  getDependentConfigChanges: function (changedConfig, selectedServices, allConfigs, securityEnabled) {
-    var affectedProperties = [],
-      newValue = changedConfig.get('value'),
-      rangerPluginEnabledName = 'ranger-yarn-plugin-enabled',
-      affectedPropertyName = changedConfig.get('name');
-    if (affectedPropertyName == rangerPluginEnabledName) {
-      var configYarnAclEnable = this.getConfig(allConfigs, 'yarn.acl.enable', 'yarn-site.xml', 'YARN'),
-        configAuthorizationProviderClass = this.getConfig(allConfigs, 'yarn.authorization-provider', 'yarn-site.xml', 'YARN'),
-        isAuthorizationProviderClassNotSet = typeof configAuthorizationProviderClass === 'undefined',
-        rangerPluginEnabled = newValue == 'Yes',
-        newYarnAclEnable = 'true',
-        newAuthorizationProviderClass = 'org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer';
-
-      // Add YARN-Ranger configs
-      if (rangerPluginEnabled) {
-        if (configYarnAclEnable != null && newYarnAclEnable !== configYarnAclEnable.get('value')) {
-          affectedProperties.push({
-            serviceName: 'YARN',
-            sourceServiceName: 'YARN',
-            propertyName: 'yarn.acl.enable',
-            propertyDisplayName: 'yarn.acl.enable',
-            newValue: newYarnAclEnable,
-            curValue: configYarnAclEnable.get('value'),
-            changedPropertyName: rangerPluginEnabledName,
-            removed: false,
-            filename: 'yarn-site.xml'
-          });
-        }
-        if (isAuthorizationProviderClassNotSet || newAuthorizationProviderClass !== configAuthorizationProviderClass.get('value')) {
-          affectedProperties.push({
-            serviceName: 'YARN',
-            sourceServiceName: 'YARN',
-            propertyName: 'yarn.authorization-provider',
-            propertyDisplayName: 'yarn.authorization-provider',
-            newValue: newAuthorizationProviderClass,
-            curValue: isAuthorizationProviderClassNotSet ? '': configAuthorizationProviderClass.get('value'),
-            changedPropertyName: rangerPluginEnabledName,
-            removed: false,
-            isNewProperty: isAuthorizationProviderClassNotSet,
-            filename: 'yarn-site.xml',
-            categoryName: 'Custom yarn-site'
-          });
-        }
-      }
-    }
-    return affectedProperties;
-  }
-});
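
Finally, the YARN handler derived yarn.acl.enable and yarn.authorization-provider from the plugin flag. A last sketch in the same assumed style; only the enabling direction is shown, matching the handler above.

# Last sketch in the same assumed style; enabling direction only.
RANGER_YARN_AUTHORIZER = "org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer"

def recommend_yarn_ranger_configs(services, configurations):
    ranger_props = services.get("configurations", {}) \
                           .get("ranger-yarn-plugin-properties", {}) \
                           .get("properties", {})
    if ranger_props.get("ranger-yarn-plugin-enabled", "No") == "Yes":
        yarn_site = configurations.setdefault("yarn-site", {}).setdefault("properties", {})
        # The same two derived properties the removed handler pushed from the UI.
        yarn_site["yarn.acl.enable"] = "true"
        yarn_site["yarn.authorization-provider"] = RANGER_YARN_AUTHORIZER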