
HADOOP-19151. Support configurable SASL mechanism. (#6740)

Tsz-Wo Nicholas Sze, 1 year ago
commit 78987a71a6

+ 45 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslConstants.java

@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.security;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * SASL related constants.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class SaslConstants {
+  public static final Logger LOG = LoggerFactory.getLogger(SaslConstants.class);
+
+  private static final String SASL_MECHANISM_ENV = "HADOOP_SASL_MECHANISM";
+  public static final String SASL_MECHANISM;
+  private static final String SASL_MECHANISM_DEFAULT = "DIGEST-MD5";
+
+  static {
+    final String mechanism = System.getenv(SASL_MECHANISM_ENV);
+    LOG.debug("{} = {} (env)", SASL_MECHANISM_ENV, mechanism);
+    SASL_MECHANISM = mechanism != null? mechanism : SASL_MECHANISM_DEFAULT;
+    LOG.debug("{} = {} (effective)", SASL_MECHANISM_ENV, SASL_MECHANISM);
+  }
+
+  private SaslConstants() {}
+}
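
Because the mechanism now comes from the HADOOP_SASL_MECHANISM environment variable rather than a compile-time constant, it is worth confirming that the JVM actually registers a factory for the chosen name. The following standalone check is illustrative only and not part of this commit; the class name CheckSaslMechanism is made up for the example.

import java.util.Collections;
import java.util.Enumeration;

import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClientFactory;

import org.apache.hadoop.security.SaslConstants;

/** Illustrative sanity check: is the configured mechanism offered by any
 *  registered SASL client factory in this JVM? */
public class CheckSaslMechanism {
  public static void main(String[] args) {
    final String mechanism = SaslConstants.SASL_MECHANISM;
    boolean supported = false;
    final Enumeration<SaslClientFactory> factories = Sasl.getSaslClientFactories();
    while (factories.hasMoreElements()) {
      for (String m : factories.nextElement().getMechanismNames(Collections.emptyMap())) {
        supported |= mechanism.equals(m);
      }
    }
    System.out.println(mechanism + (supported ? " is" : " is NOT")
        + " offered by a registered SaslClientFactory");
  }
}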

+ 7 - 11
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

@@ -223,8 +223,8 @@ public class SaslRpcServer {
     SIMPLE((byte) 80, ""),
     KERBEROS((byte) 81, "GSSAPI"),
     @Deprecated
-    DIGEST((byte) 82, "DIGEST-MD5"),
-    TOKEN((byte) 82, "DIGEST-MD5"),
+    DIGEST((byte) 82, SaslConstants.SASL_MECHANISM),
+    TOKEN((byte) 82, SaslConstants.SASL_MECHANISM),
     PLAIN((byte) 83, "PLAIN");
 
     /** The code for this method. */
@@ -273,7 +273,7 @@ public class SaslRpcServer {
     }
   };
 
-  /** CallbackHandler for SASL DIGEST-MD5 mechanism */
+  /** CallbackHandler for SASL mechanism. */
   @InterfaceStability.Evolving
   public static class SaslDigestCallbackHandler implements CallbackHandler {
     private SecretManager<TokenIdentifier> secretManager;
@@ -309,7 +309,7 @@ public class SaslRpcServer {
           continue; // realm is ignored
         } else {
           throw new UnsupportedCallbackException(callback,
-              "Unrecognized SASL DIGEST-MD5 Callback");
+              "Unrecognized SASL Callback");
         }
       }
       if (pc != null) {
@@ -319,11 +319,8 @@ public class SaslRpcServer {
         UserGroupInformation user = null;
         user = tokenIdentifier.getUser(); // may throw exception
         connection.attemptingUser = user;
-        
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("SASL server DIGEST-MD5 callback: setting password "
-              + "for client: " + tokenIdentifier.getUser());
-        }
+
+        LOG.debug("SASL server callback: setting password for client: {}", user);
         pc.setPassword(password);
       }
       if (ac != null) {
@@ -339,8 +336,7 @@ public class SaslRpcServer {
             UserGroupInformation logUser =
               getIdentifier(authzid, secretManager).getUser();
             String username = logUser == null ? null : logUser.getUserName();
-            LOG.debug("SASL server DIGEST-MD5 callback: setting "
-                + "canonicalized client ID: " + username);
+            LOG.debug("SASL server callback: setting authorizedID: {}", username);
           }
           ac.setAuthorizedID(authzid);
         }
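
With DIGEST and TOKEN now delegating to SaslConstants.SASL_MECHANISM, code that builds a server from AuthMethod stays mechanism-agnostic. A minimal sketch of that usage follows; the class name, the "protocol"/"serverName" placeholders, and the supplied properties and callback handler are illustrative, not the values the RPC layer actually uses.

import java.util.Map;

import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;

import org.apache.hadoop.security.SaslRpcServer.AuthMethod;

/** Sketch only: TOKEN's mechanism name follows SaslConstants.SASL_MECHANISM,
 *  so the same call works whether the mechanism is DIGEST-MD5 or an override. */
public class TokenSaslServerSketch {
  static SaslServer create(Map<String, String> saslProps, CallbackHandler handler)
      throws SaslException {
    return Sasl.createSaslServer(
        AuthMethod.TOKEN.getMechanismName(), // "DIGEST-MD5" unless HADOOP_SASL_MECHANISM overrides it
        "protocol", "serverName", saslProps, handler);
  }
}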

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

@@ -536,7 +536,7 @@ public class TestSaslRPC extends TestRpcBase {
   private static Pattern BadToken =
       Pattern.compile("^" + RemoteException.class.getName() +
           "\\("+ SaslException.class.getName() + "\\): " +
-          "DIGEST-MD5: digest response format violation.*");
+          SaslConstants.SASL_MECHANISM + ": digest response format violation.*");
   private static Pattern KrbFailed =
       Pattern.compile(".*Failed on local exception:.* " +
                       "Failed to specify server's Kerberos principal name.*");

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslParticipant.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.security.FastSaslClientFactory;
 import org.apache.hadoop.security.FastSaslServerFactory;
+import org.apache.hadoop.security.SaslConstants;
 import org.apache.hadoop.security.SaslInputStream;
 import org.apache.hadoop.security.SaslOutputStream;
 
@@ -50,7 +51,7 @@ class SaslParticipant {
   // a short string.
   private static final String SERVER_NAME = "0";
   private static final String PROTOCOL = "hdfs";
-  private static final String MECHANISM = "DIGEST-MD5";
+  private static final String[] MECHANISM_ARRAY = {SaslConstants.SASL_MECHANISM};
 
   // One of these will always be null.
   private final SaslServer saslServer;
@@ -81,7 +82,7 @@ class SaslParticipant {
       Map<String, String> saslProps, CallbackHandler callbackHandler)
       throws SaslException {
     initializeSaslServerFactory();
-    return new SaslParticipant(saslServerFactory.createSaslServer(MECHANISM,
+    return new SaslParticipant(saslServerFactory.createSaslServer(MECHANISM_ARRAY[0],
       PROTOCOL, SERVER_NAME, saslProps, callbackHandler));
   }
 
@@ -99,7 +100,7 @@ class SaslParticipant {
       throws SaslException {
     initializeSaslClientFactory();
     return new SaslParticipant(
-        saslClientFactory.createSaslClient(new String[] {MECHANISM}, userName,
+        saslClientFactory.createSaslClient(MECHANISM_ARRAY, userName,
             PROTOCOL, SERVER_NAME, saslProps, callbackHandler));
   }
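
The client side mirrors this: the mechanism array handed to the SASL factory now carries the configured mechanism instead of a hard-coded DIGEST-MD5. A minimal sketch using the JDK factory directly (rather than Hadoop's FastSaslClientFactory) is shown below; "hdfs" and "0" mirror SaslParticipant's PROTOCOL and SERVER_NAME constants, and the class name is made up for the example.

import java.util.Map;

import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;

import org.apache.hadoop.security.SaslConstants;

/** Illustrative only: same array contents as MECHANISM_ARRAY; userName is
 *  passed as the authorization id, as SaslParticipant does. */
public class DataTransferSaslClientSketch {
  static SaslClient create(String userName, Map<String, String> saslProps,
      CallbackHandler handler) throws SaslException {
    return Sasl.createSaslClient(
        new String[] {SaslConstants.SASL_MECHANISM},
        userName, "hdfs", "0", saslProps, handler);
  }
}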
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java

@@ -241,7 +241,7 @@ public class SaslDataTransferServer {
           continue; // realm is ignored
         } else {
           throw new UnsupportedCallbackException(callback,
-              "Unrecognized SASL DIGEST-MD5 Callback: " + callback);
+              "Unrecognized SASL Callback: " + callback);
         }
       }
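
The data-transfer server's handler gets the same mechanism-neutral wording. For reference, a hedged outline of the callback shape both handlers now share; the secret lookup is elided and the class name is illustrative.

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.sasl.AuthorizeCallback;
import javax.security.sasl.RealmCallback;

/** Sketch of the mechanism-agnostic callback handling; error messages no
 *  longer name DIGEST-MD5 because other mechanisms drive the same callbacks. */
class MechanismAgnosticCallbackHandler implements CallbackHandler {
  @Override
  public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
    for (Callback callback : callbacks) {
      if (callback instanceof NameCallback) {
        // resolve the client name to a token identifier (elided)
      } else if (callback instanceof PasswordCallback) {
        // fetch the token secret and call setPassword (elided)
      } else if (callback instanceof AuthorizeCallback) {
        // compare authentication/authorization ids, then setAuthorizedID (elided)
      } else if (callback instanceof RealmCallback) {
        continue; // realm is ignored
      } else {
        throw new UnsupportedCallbackException(callback,
            "Unrecognized SASL Callback: " + callback);
      }
    }
  }
}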