
AMBARI-11614 : Stack Advisor changes needed for Ranger Kafka Plugin (jluniya)

Jayush Luniya · 10 years ago
commit 414dbe0089

+ 2 - 1
ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py

@@ -120,7 +120,8 @@ def setup_ranger_plugin(component_select_name, service_name,
      group = component_group,
      mode=0744)

-    setup_ranger_plugin_jar_symblink(hdp_version, service_name, component_list)
+    #This should be done by rpm
+    #setup_ranger_plugin_jar_symblink(hdp_version, service_name, component_list)

     setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, hdp_version, credential_file,
               xa_audit_db_password, ssl_truststore_password, ssl_keystore_password,

+ 7 - 1
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py

@@ -22,6 +22,7 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from resource_management.libraries.functions.default import default
 from utils import get_bare_principal
+from resource_management.libraries.functions.get_hdp_version import get_hdp_version

 import status_params

@@ -198,10 +199,15 @@ if has_ranger_admin and is_supported_kafka_ranger:
   downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")

   driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-  driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+  driver_curl_target = format("{kafka_home}libs/{jdbc_jar_name}")

   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-kafka-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
+
+  hdp_version = get_hdp_version('kafka-broker')
+  setup_ranger_env_sh_source = format('/usr/hdp/{hdp_version}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh')
+  setup_ranger_env_sh_target = format("{conf_dir}/kafka-ranger-env.sh")
+
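
A quick note on the new paths above, with a hedged sketch: driver_curl_target now drops the audit DB driver into the broker's libs directory (kafka_home conventionally carries a trailing slash, which is why the template reads {kafka_home}libs/), and the two setup_ranger_env_sh_* params point at the versioned kafka-ranger-env.sh template and its destination in the broker conf dir. The values below are assumptions used only to show how the templates expand; Ambari's format() resolves these names from the params scope at runtime.

# Illustration only: Ambari's format() helper resolves {names} from the caller's
# scope; plain str.format with hypothetical values stands in for it here.
kafka_home = '/usr/hdp/current/kafka-broker/'      # assumed value; note the trailing slash
conf_dir = '/usr/hdp/current/kafka-broker/config'  # assumed broker conf dir
hdp_version = '2.3.0.0-1234'                       # assumed build from get_hdp_version('kafka-broker')
jdbc_jar_name = 'mysql-connector-java.jar'         # assumed audit DB driver jar

driver_curl_target = '{0}libs/{1}'.format(kafka_home, jdbc_jar_name)
setup_ranger_env_sh_source = (
    '/usr/hdp/{0}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh'
).format(hdp_version)
setup_ranger_env_sh_target = '{0}/kafka-ranger-env.sh'.format(conf_dir)

print(driver_curl_target)
# -> /usr/hdp/current/kafka-broker/libs/mysql-connector-java.jar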

+ 9 - 1
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/setup_ranger_kafka.py

@@ -15,13 +15,15 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions.setup_ranger_plugin_xml import setup_ranger_plugin
+from resource_management.core.resources import Execute
+from resource_management.libraries.functions.format import format

 def setup_ranger_kafka():
   import params

   if params.has_ranger_admin:

+    from resource_management.libraries.functions.setup_ranger_plugin_xml import setup_ranger_plugin
     setup_ranger_plugin('kafka-broker', 'kafka',
                         params.downloaded_custom_connector, params.driver_curl_source,
                         params.driver_curl_target, params.java64_home,
@@ -37,5 +39,11 @@ def setup_ranger_kafka():
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password,
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
                         api_version = 'v2')
+    
+    if params.enable_ranger_kafka: 
+      Execute(('cp', '--remove-destination', params.setup_ranger_env_sh_source, params.setup_ranger_env_sh_target),
+        not_if=format("test -f {setup_ranger_env_sh_target}"),
+        sudo=True
+      )
   else:
     Logger.info('Ranger admin not installed')
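
For readers less familiar with Ambari's resource_management DSL, the new Execute resource above amounts to a guarded, privileged copy: it runs cp --remove-destination only while the target env script is absent. A rough equivalent in plain Python, using the hypothetical paths from the earlier sketch rather than the actual Ambari implementation:

import os
import subprocess

# Hypothetical stand-ins for params.setup_ranger_env_sh_source / _target.
source = '/usr/hdp/2.3.0.0-1234/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh'
target = '/usr/hdp/current/kafka-broker/config/kafka-ranger-env.sh'

# Mirrors not_if=format("test -f {setup_ranger_env_sh_target}"): skip once the file exists.
if not os.path.isfile(target):
    # Mirrors Execute(..., sudo=True): copy as root, replacing any stale file or symlink.
    subprocess.check_call(['sudo', 'cp', '--remove-destination', source, target])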

+ 1 - 1
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py

@@ -210,7 +210,7 @@ if has_ranger_admin:
   downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")

   driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-  driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+  driver_curl_target = format("/usr/hdp/current/knox-server/ext/{jdbc_jar_name}")

   knox_ranger_plugin_config = {
     'username': repo_config_username,

+ 1 - 1
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-site.xml

@@ -22,7 +22,7 @@
 <configuration>
   <property>
     <name>ranger.service.host</name>
-    <value>{{ranger_admin_hosts}}</value>
+    <value>{{kms_host}}</value>
   </property>

   <property>

+ 1 - 1
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py

@@ -216,7 +216,7 @@ if has_ranger_admin:
   downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")

   driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-  driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+  driver_curl_target = format("{storm_component_home_dir}/lib/{jdbc_jar_name}")

   storm_ranger_plugin_config = {
     'username': repo_config_username,

+ 62 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/alerts.json

@@ -0,0 +1,62 @@
+{
+  "RANGER": {
+    "service": [],
+    "RANGER_ADMIN": [
+      {
+        "name": "ranger_admin_process",
+        "label": "Ranger Admin Process",
+        "description": "This host-level alert is triggered if the Ranger Admin Web UI is unreachable.",
+        "interval": 1,
+        "scope": "ANY",
+        "source": {
+          "type": "WEB",
+          "uri": {
+              "http": "{{admin-properties/policymgr_external_url}}",
+              "https": "{{admin-properties/policymgr_external_url}}",
+              "https_property": "{{ranger-admin-site/ranger.service.https.attrib.ssl.enabled}}",
+              "https_property_value": "true",
+              "connection_timeout": 5.0
+            },
+          "reporting": {
+            "ok": {
+              "text": "HTTP {0} response in {2:.3f}s"
+            },
+            "warning": {
+              "text": "HTTP {0} response from {1} in {2:.3f}s ({3})"
+            },
+            "critical": {
+              "text": "Connection failed to {1} ({3})"
+            }
+          }
+        }
+      }
+    ],
+    "RANGER_USERSYNC": [
+      {
+        "name": "ranger_usersync_process",
+        "label": "Ranger Usersync Process",
+        "description": "This host-level alert is triggered if the Ranger Usersync cannot be determined to be up.",
+        "interval": 1,
+        "scope": "HOST",
+        "source": {
+          "type": "PORT",
+          "uri": "{{ranger-ugsync-site/ranger.usersync.port}}",
+          "default_port": 5151,
+          "reporting": {
+            "ok": {
+              "text": "TCP OK - {0:.3f}s response on port {1}"
+            },
+            "warning": {
+              "text": "TCP OK - {0:.3f}s response on port {1}",
+              "value": 1.5
+            },
+            "critical": {
+              "text": "Connection failed: {0} to {1}:{2}",
+              "value": 5.0
+            }
+          }
+        }
+      }
+    ]
+  }
+}
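
The new RANGER_USERSYNC entry is a plain PORT alert: the agent times a TCP connect to ranger.usersync.port (default 5151) and grades the latency against the warning (1.5s) and critical (5.0s) thresholds, while the RANGER_ADMIN entry is a WEB check against the external policy manager URL. As a hedged illustration of the PORT-style check only (the real evaluation is done by Ambari's alert framework, not this code):

import socket
import time

def check_usersync_port(host, port=5151, warning=1.5, critical=5.0):
    """Time a TCP connect and grade it the way a PORT alert would."""
    start = time.time()
    try:
        sock = socket.create_connection((host, port), timeout=critical)
        sock.close()
    except (socket.error, socket.timeout) as err:
        return 'CRITICAL', 'Connection failed: {0} to {1}:{2}'.format(err, host, port)
    elapsed = time.time() - start
    state = 'WARNING' if elapsed >= warning else 'OK'
    return state, 'TCP OK - {0:.3f}s response on port {1}'.format(elapsed, port)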

+ 32 - 2
ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py

@@ -25,7 +25,8 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
       "TEZ": self.recommendTezConfigurations,
       "TEZ": self.recommendTezConfigurations,
       "HDFS": self.recommendHDFSConfigurations,
       "HDFS": self.recommendHDFSConfigurations,
       "HIVE": self.recommendHIVEConfigurations,
       "HIVE": self.recommendHIVEConfigurations,
-      "HBASE": self.recommendHBASEConfigurations
+      "HBASE": self.recommendHBASEConfigurations,
+      "KAFKA": self.recommendKAFKAConfigurations,
     }
     parentRecommendConfDict.update(childRecommendConfDict)
     return parentRecommendConfDict
@@ -86,12 +87,22 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
       if ("RANGER" in servicesList) and (rangerPluginEnabled.lower() == 'Yes'.lower()):
       if ("RANGER" in servicesList) and (rangerPluginEnabled.lower() == 'Yes'.lower()):
         putHdfsSiteProperty("dfs.namenode.inode.attributes.provider.class",'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer')
         putHdfsSiteProperty("dfs.namenode.inode.attributes.provider.class",'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer')
 
 
+  def recommendKAFKAConfigurations(self, configurations, clusterData, services, hosts):
+    putKafkaBrokerProperty = self.putProperty(configurations, "kafka-broker", services)
+
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    if 'ranger-kafka-plugin-properties' in services['configurations'] and ('ranger-kafka-plugin-enabled' in services['configurations']['ranger-kafka-plugin-properties']['properties']):
+      rangerPluginEnabled = services['configurations']['ranger-kafka-plugin-properties']['properties']['ranger-kafka-plugin-enabled']
+      if ("RANGER" in servicesList) and (rangerPluginEnabled.lower() == "Yes".lower()):
+        putKafkaBrokerProperty("authorizer.class.name", 'org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer')
+
   def getServiceConfigurationValidators(self):
       parentValidators = super(HDP23StackAdvisor, self).getServiceConfigurationValidators()
       childValidators = {
         "HDFS": {"hdfs-site": self.validateHDFSConfigurations},
         "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations},
-        "HBASE": {"hbase-site": self.validateHBASEConfigurations}
+        "HBASE": {"hbase-site": self.validateHBASEConfigurations},
+        "KAFKA": {"kafka-broker": self.validateKAFKAConfigurations}
       }
       self.mergeValidators(parentValidators, childValidators)
       return parentValidators
@@ -197,6 +208,25 @@ class HDP23StackAdvisor(HDP22StackAdvisor):

     return self.toConfigurationValidationProblems(validationItems, "hbase-site")

+  def validateKAFKAConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    kafka_broker = properties
+    validationItems = []
+
+    #Adding Ranger Plugin logic here
+    ranger_plugin_properties = getSiteProperties(configurations, "ranger-kafka-plugin-properties")
+    ranger_plugin_enabled = ranger_plugin_properties['ranger-kafka-plugin-enabled']
+    prop_name = 'authorizer.class.name'
+    prop_val = "org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer"
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    if ("RANGER" in servicesList) and (ranger_plugin_enabled.lower() == 'Yes'.lower()):
+      if kafka_broker[prop_name] != prop_val:
+        validationItems.append({"config-name": prop_name,
+                                "item": self.getWarnItem(
+                                "If Ranger Kafka Plugin is enabled, "\
+                                "{0} needs to be set to {1}".format(prop_name, prop_val))})
+
+    return self.toConfigurationValidationProblems(validationItems, "kafka-broker")
+

   def isComponentUsingCardinalityForLayout(self, componentName):
     return componentName in ['NFS_GATEWAY', 'PHOENIX_QUERY_SERVER']
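
Taken together, the advisor changes mean: when RANGER is in the service list and ranger-kafka-plugin-enabled is Yes, recommendKAFKAConfigurations pushes RangerKafkaAuthorizer into kafka-broker, and validateKAFKAConfigurations raises a warning if authorizer.class.name has been edited away from that value. A minimal sketch of the request shape these methods read, with field names taken from the diff (the advisor instance itself comes from the HDP 2.3 stack definition and is only referenced in comments):

# Minimal fragments shaped like the dicts the new advisor methods read.
services = {
    "services": [
        {"StackServices": {"service_name": "KAFKA"}},
        {"StackServices": {"service_name": "RANGER"}},
    ],
    "configurations": {
        "ranger-kafka-plugin-properties": {
            "properties": {"ranger-kafka-plugin-enabled": "Yes"}
        }
    },
}
configurations = {}  # the advisor writes its recommendations into this dict

# Assuming `advisor` is an HDP23StackAdvisor instance loaded from the stack:
# advisor.recommendKAFKAConfigurations(configurations, {}, services, None)
# configurations["kafka-broker"]["properties"]["authorizer.class.name"]
#   -> "org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer"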

+ 27 - 0
ambari-web/app/data/HDP2.3/site_properties.js

@@ -393,6 +393,33 @@ hdp23properties.push({
     "category": "Advanced ranger-yarn-audit",
     "category": "Advanced ranger-yarn-audit",
     "serviceName": "YARN"
     "serviceName": "YARN"
   },
   },
+  {
+    "id": "site property",
+    "name": "xasecure.audit.destination.db",
+    "displayName": "Audit to DB",
+    "displayType": "checkbox",
+    "filename": "ranger-kafka-audit.xml",
+    "category": "Advanced ranger-kafka-audit",
+    "serviceName": "KAFKA"
+  },
+  {
+    "id": "site property",
+    "name": "xasecure.audit.destination.hdfs",
+    "displayName": "Audit to HDFS",
+    "displayType": "checkbox",
+    "filename": "ranger-kafka-audit.xml",
+    "category": "Advanced ranger-kafka-audit",
+    "serviceName": "KAFKA"
+  },
+  {
+    "id": "site property",
+    "name": "xasecure.audit.destination.solr",
+    "displayName": "Audit to SOLR",
+    "displayType": "checkbox",
+    "filename": "ranger-kafka-audit.xml",
+    "category": "Advanced ranger-kafka-audit",
+    "serviceName": "KAFKA"
+  },
   {
     "id": "site property",
     "name": "xasecure.audit.provider.summary.enabled",