
AMBARI-19642. Error during Alert: Unable to authenticate through LDAP for Hiveserver2 (also floods HS2 log with error messages) (smohanty)

Sumit Mohanty, 8 years ago
Commit 66a2518cb5

+ 9 - 4
ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py

@@ -25,7 +25,7 @@ from resource_management.libraries.functions import format
 
 def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd=None, smokeuser='ambari-qa',
                            transport_mode="binary", http_endpoint="cliservice", ssl=False, ssl_keystore=None,
-                           ssl_password=None, check_command_timeout=30):
+                           ssl_password=None, check_command_timeout=30, ldap_username="", ldap_password=""):
   """
   Hive thrift SASL port check
   """
@@ -49,12 +49,17 @@ def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd
   if hive_auth == "NOSASL":
     beeline_url.append('auth=noSasl')
 
+  credential_str = ""
+  # append username and password for LDAP
+  if hive_auth == "LDAP":
+    credential_str = "-n '{ldap_username}' -p '{ldap_password!p}'"
+
   # append url according to ssl configuration
   if ssl and ssl_keystore is not None and ssl_password is not None:
     beeline_url.extend(['ssl={ssl_str}', 'sslTrustStore={ssl_keystore}', 'trustStorePassword={ssl_password!p}'])
 
   # append url according to principal and execute kinit
-  if kinitcmd:
+  if kinitcmd and hive_auth != "LDAP":
     beeline_url.append('principal={key}')
 
     # prevent concurrent kinit
@@ -65,8 +70,8 @@ def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd
     finally:
       kinit_lock.release()
 
-  cmd = "! beeline -u '%s' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'" % \
-        format(";".join(beeline_url))
+  cmd = "! beeline -u '%s' %s -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'" % \
+        (format(";".join(beeline_url)), format(credential_str))
 
   Execute(cmd,
     user=smokeuser,
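
Note (editor's sketch, not part of the patch): when hive_auth is "LDAP", the command composed above ends up with the -n/-p flags appended after the JDBC URL. In the real code, resource_management's format() resolves {ldap_username} and {ldap_password!p} from the caller's variables, the !p conversion flagging the value as a password. The host, port, and credentials below are invented placeholders.

# Illustration only: hypothetical values showing the shape of the final command.
beeline_url = ["jdbc:hive2://hs2.example.com:10000/", "transportMode=binary"]
credential_str = "-n 'alert_user' -p 'alert_secret'"  # stands in for the resolved {ldap_username}/{ldap_password!p}

cmd = ("! beeline -u '%s' %s -e '' 2>&1| awk '{print}'"
       "|grep -i -e 'Connection refused' -e 'Invalid URL'"
       % (";".join(beeline_url), credential_str))
# -> ! beeline -u 'jdbc:hive2://hs2.example.com:10000/;transportMode=binary' -n 'alert_user' -p 'alert_secret' -e '' ...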

+ 13 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_interactive_thrift_port.py

@@ -45,6 +45,8 @@ SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
 HIVE_SSL = '{{hive-site/hive.server2.use.SSL}}'
 HIVE_SSL_KEYSTORE_PATH = '{{hive-interactive-site/hive.server2.keystore.path}}'
 HIVE_SSL_KEYSTORE_PASSWORD = '{{hive-interactive-site/hive.server2.keystore.password}}'
+HIVE_LDAP_USERNAME = '{{hive-env/alert_ldap_username}}'
+HIVE_LDAP_PASSWORD = '{{hive-env/alert_ldap_password}}'
 
 # The configured Kerberos executable search paths, if any
 KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY = '{{kerberos-env/executable_search_paths}}'
@@ -84,7 +86,8 @@ def get_tokens():
           HIVE_SERVER2_INTERACTIVE_AUTHENTICATION_KEY, HIVE_SERVER2_AUTHENTICATION_KEY,
           HIVE_SERVER_INTERACTIVE_PRINCIPAL_KEY, SMOKEUSER_KEYTAB_KEY, SMOKEUSER_PRINCIPAL_KEY,
           HIVE_SERVER_INTERACTIVE_THRIFT_HTTP_PORT_KEY, HIVE_SERVER_INTERACTIVE_TRANSPORT_MODE_KEY,
-          KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY, HIVE_SSL, HIVE_SSL_KEYSTORE_PATH, HIVE_SSL_KEYSTORE_PASSWORD)
+          KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY, HIVE_SSL, HIVE_SSL_KEYSTORE_PATH, HIVE_SSL_KEYSTORE_PASSWORD,
+          HIVE_LDAP_USERNAME, HIVE_LDAP_PASSWORD)
 
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
@@ -164,6 +167,13 @@ def execute(configurations={}, parameters={}, host_name=None):
   if SMOKEUSER_KEY in configurations:
     smokeuser = configurations[SMOKEUSER_KEY]
 
+  ldap_username = ""
+  ldap_password = ""
+  if HIVE_LDAP_USERNAME in configurations:
+    ldap_username = configurations[HIVE_LDAP_USERNAME]
+  if HIVE_LDAP_PASSWORD in configurations:
+    ldap_password = configurations[HIVE_LDAP_PASSWORD]
+
   result_code = None
 
   if security_enabled:
@@ -196,7 +206,8 @@ def execute(configurations={}, parameters={}, host_name=None):
       hive_check.check_thrift_port_sasl(host_name, port, hive_server2_authentication, hive_server_principal,
                                         kinitcmd, smokeuser, transport_mode=transport_mode, ssl=hive_ssl,
                                         ssl_keystore=hive_ssl_keystore_path, ssl_password=hive_ssl_keystore_password,
-                                        check_command_timeout=int(check_command_timeout))
+                                        check_command_timeout=int(check_command_timeout), ldap_username=ldap_username,
+                                        ldap_password=ldap_password)
       result_code = 'OK'
       total_time = time.time() - start_time
       label = OK_MESSAGE.format(total_time, port)
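
For reference, a minimal sketch (assumption, not patch code) of how these values reach the alert: get_tokens() declares the template tokens the alert needs, Ambari passes their resolved values to execute() in the configurations dict, and the two membership checks above fall back to empty strings, equivalent to dict.get() with a default. The credential values below are made up.

HIVE_LDAP_USERNAME = '{{hive-env/alert_ldap_username}}'
HIVE_LDAP_PASSWORD = '{{hive-env/alert_ldap_password}}'

# Hypothetical configurations dict as the alert framework would supply it.
configurations = {
    HIVE_LDAP_USERNAME: 'alert_user',
    HIVE_LDAP_PASSWORD: 'alert_secret',
}

# Equivalent to the `if KEY in configurations:` blocks in the patch above.
ldap_username = configurations.get(HIVE_LDAP_USERNAME, "")
ldap_password = configurations.get(HIVE_LDAP_PASSWORD, "")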

+ 13 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py

@@ -44,6 +44,9 @@ SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
 HIVE_SSL = '{{hive-site/hive.server2.use.SSL}}'
 HIVE_SSL_KEYSTORE_PATH = '{{hive-site/hive.server2.keystore.path}}'
 HIVE_SSL_KEYSTORE_PASSWORD = '{{hive-site/hive.server2.keystore.password}}'
+HIVE_LDAP_USERNAME = '{{hive-env/alert_ldap_username}}'
+HIVE_LDAP_PASSWORD = '{{hive-env/alert_ldap_password}}'
+
 
 # The configured Kerberos executable search paths, if any
 KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY = '{{kerberos-env/executable_search_paths}}'
@@ -83,7 +86,7 @@ def get_tokens():
           HIVE_SERVER2_AUTHENTICATION_KEY, HIVE_SERVER_PRINCIPAL_KEY,
           SMOKEUSER_KEYTAB_KEY, SMOKEUSER_PRINCIPAL_KEY, HIVE_SERVER_THRIFT_HTTP_PORT_KEY,
           HIVE_SERVER_TRANSPORT_MODE_KEY, KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY, HIVE_SSL,
-          HIVE_SSL_KEYSTORE_PATH, HIVE_SSL_KEYSTORE_PASSWORD)
+          HIVE_SSL_KEYSTORE_PATH, HIVE_SSL_KEYSTORE_PASSWORD, HIVE_LDAP_USERNAME, HIVE_LDAP_PASSWORD)
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def get_tokens():
@@ -165,6 +168,13 @@ def execute(configurations={}, parameters={}, host_name=None):
   if SMOKEUSER_KEY in configurations:
     smokeuser = configurations[SMOKEUSER_KEY]
 
+  ldap_username = ""
+  ldap_password = ""
+  if HIVE_LDAP_USERNAME in configurations:
+    ldap_username = configurations[HIVE_LDAP_USERNAME]
+  if HIVE_LDAP_PASSWORD in configurations:
+    ldap_password = configurations[HIVE_LDAP_PASSWORD]
+
   result_code = None
 
   if security_enabled:
@@ -197,7 +207,8 @@ def execute(configurations={}, parameters={}, host_name=None):
       hive_check.check_thrift_port_sasl(host_name, port, hive_server2_authentication, hive_server_principal,
                                         kinitcmd, smokeuser, transport_mode=transport_mode, ssl=hive_ssl,
                                         ssl_keystore=hive_ssl_keystore_path, ssl_password=hive_ssl_keystore_password,
-                                        check_command_timeout=int(check_command_timeout))
+                                        check_command_timeout=int(check_command_timeout), ldap_username=ldap_username,
+                                        ldap_password=ldap_password)
       result_code = 'OK'
       total_time = time.time() - start_time
       label = OK_MESSAGE.format(total_time, port)

+ 35 - 1
ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-env.xml

@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <!-- hive-env.sh -->
   <property>
     <name>content</name>
@@ -81,4 +81,38 @@
         </value-attributes>
     <on-ambari-upgrade add="true"/>
     </property>
+    <property>
+      <name>alert_ldap_username</name>
+      <value></value>
+      <description>LDAP username to be used for alerts</description>
+      <display-name>LDAP user for Alerts</display-name>
+      <value-attributes>
+        <empty-value-valid>true</empty-value-valid>
+      </value-attributes>
+      <depends-on>
+        <property>
+          <type>hive-site</type>
+          <name>hive.server2.authentication</name>
+        </property>
+      </depends-on>
+      <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+      <name>alert_ldap_password</name>
+      <value></value>
+      <property-type>PASSWORD</property-type>
+      <description>Password for the LDAP user used in alerts</description>
+      <display-name>LDAP password for Alerts</display-name>
+      <value-attributes>
+        <empty-value-valid>true</empty-value-valid>
+        <type>password</type>
+      </value-attributes>
+      <depends-on>
+        <property>
+          <type>hive-site</type>
+          <name>hive.server2.authentication</name>
+        </property>
+      </depends-on>
+      <on-ambari-upgrade add="false"/>
+    </property>
 </configuration>
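
These two hive-env properties back the {{hive-env/alert_ldap_username}} and {{hive-env/alert_ldap_password}} tokens consumed by the alert scripts above. As a hedged sketch of the usual Ambari pattern (not shown in this commit), a stack script would read them with empty-string fallbacks:

# Assumed general pattern for stack scripts, not part of this patch.
from resource_management.libraries.functions.default import default

alert_ldap_username = default('/configurations/hive-env/alert_ldap_username', '')
alert_ldap_password = default('/configurations/hive-env/alert_ldap_password', '')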

+ 22 - 1
ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py

@@ -108,7 +108,8 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     childValidators = {
       "ATLAS": {"application-properties": self.validateAtlasConfigurations},
       "HIVE": {"hive-interactive-env": self.validateHiveInteractiveEnvConfigurations,
-               "hive-interactive-site": self.validateHiveInteractiveSiteConfigurations},
+               "hive-interactive-site": self.validateHiveInteractiveSiteConfigurations,
+               "hive-env": self.validateHiveConfigurationsEnv},
       "YARN": {"yarn-site": self.validateYARNConfigurations},
       "RANGER": {"ranger-tagsync-site": self.validateRangerTagsyncConfigurations},
       "SPARK2": {"spark2-defaults": self.validateSpark2Defaults,
@@ -365,6 +366,26 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     validationProblems = self.toConfigurationValidationProblems(validationItems, "hive-interactive-site")
     return validationProblems
 
+  def validateHiveConfigurationsEnv(self, properties, recommendedDefaults, configurations, services, hosts):
+    parentValidationProblems = super(HDP25StackAdvisor, self).validateHiveConfigurationsEnv(properties, recommendedDefaults, configurations, services, hosts)
+    hive_site_properties = self.getSiteProperties(configurations, "hive-site")
+    hive_env_properties = self.getSiteProperties(configurations, "hive-env")
+    validationItems = []
+
+    if 'hive.server2.authentication' in hive_site_properties and "LDAP" == hive_site_properties['hive.server2.authentication']:
+      if 'alert_ldap_username' not in hive_env_properties or hive_env_properties['alert_ldap_username'] == "":
+        validationItems.append({"config-name": "alert_ldap_username",
+                                "item": self.getWarnItem(
+                                  "Provide a user to be used for alerts. Hive authentication type LDAP requires valid LDAP credentials for the alerts.")})
+      if 'alert_ldap_password' not in hive_env_properties or hive_env_properties['alert_ldap_password'] == "":
+        validationItems.append({"config-name": "alert_ldap_password",
+                                "item": self.getWarnItem(
+                                  "Provide the password for the alert user. Hive authentication type LDAP requires valid LDAP credentials for the alerts.")})
+
+    validationProblems = self.toConfigurationValidationProblems(validationItems, "hive-env")
+    validationProblems.extend(parentValidationProblems)
+    return validationProblems
+
   def validateHiveInteractiveEnvConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
     hive_site_env_properties = self.getSiteProperties(configurations, "hive-interactive-env")
     validationItems = []
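
To make the new check concrete, here is a hypothetical input (illustration only, assuming the usual stack-advisor layout of {"config-type": {"properties": {...}}}) that would produce both WARN items:

# LDAP authentication is selected but neither alert credential is set in hive-env.
configurations = {
    "hive-site": {"properties": {"hive.server2.authentication": "LDAP"}},
    "hive-env": {"properties": {"alert_ldap_username": "",
                                "alert_ldap_password": ""}},
}
# getSiteProperties(configurations, "hive-site") returns the inner "properties"
# dict, so both alert_ldap_* checks see empty values and a WARN item is
# appended for each property.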

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-env.xml

@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <property>
     <name>enable_heap_dump</name>
     <value>false</value>

+ 5 - 5
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -42,7 +42,7 @@ class TestServiceCheck(RMFTestCase):
                         stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary;auth=noSasl' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
+    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary;auth=noSasl'  -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
                               path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
                               user = 'ambari-qa',
                               timeout = 30,
@@ -156,7 +156,7 @@ class TestServiceCheck(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; ',
                               user = 'ambari-qa',
                               )
-    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary;principal=hive/_HOST@EXAMPLE.COM' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
+    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary;principal=hive/_HOST@EXAMPLE.COM'  -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
                               path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
                               user = 'ambari-qa',
                               timeout = 30,
@@ -273,7 +273,7 @@ class TestServiceCheck(RMFTestCase):
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
-    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10010/;transportMode=binary' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
+    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10010/;transportMode=binary'  -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
       path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
       timeout = 30,
       user = 'ambari-qa')
@@ -310,13 +310,13 @@ class TestServiceCheck(RMFTestCase):
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
     self.assertResourceCalled('Execute',
-      "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10010/;transportMode=binary' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
+      "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10010/;transportMode=binary'  -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
       path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
       timeout = 30,
       user = 'ambari-qa')
 
     self.assertResourceCalled('Execute',
-      "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10500/;transportMode=binary' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
+      "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10500/;transportMode=binary'  -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
       path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
       timeout = 30,
       user = 'ambari-qa')
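
No LDAP-specific test case is added in this commit. As a hedged sketch only, an assertion for a check run with LDAP credentials supplied would look roughly like the following; the credentials are invented, and it assumes the formatted command carries the plain password string in the test harness.

# Hypothetical assertion, not part of this commit.
self.assertResourceCalled('Execute',
    "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary' "
    "-n 'alert_user' -p 'alert_secret' -e '' 2>&1| awk '{print}'"
    "|grep -i -e 'Connection refused' -e 'Invalid URL'",
    path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
    timeout = 30,
    user = 'ambari-qa')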