
HADOOP-15889. Add hadoop.token configuration parameter to load tokens. Contributed by Íñigo Goiri

Ajay Kumar · 6 years ago
commit 0d61facd37
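
For orientation, a rough, hypothetical sketch of how a client could hand a delegation token to UGI through the new hadoop.tokens key instead of writing a token storage file. The class name, token bytes, kind, and service strings below are invented for illustration; only CommonConfigurationKeysPublic.HADOOP_TOKENS and the Token/Credentials/UGI calls come from the change below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class HadoopTokensExample {           // hypothetical class name
  public static void main(String[] args) throws Exception {
    // Build a throwaway token purely for illustration; a real client would
    // obtain a delegation token from a service such as an HDFS NameNode.
    Token<TokenIdentifier> token = new Token<>(
        "id".getBytes(), "pw".getBytes(),
        new Text("EXAMPLE_KIND"), new Text("example-service"));

    // Pass the token as URL-safe base64 through the new hadoop.tokens key.
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_TOKENS,
        token.encodeToUrlString());

    // UGI decodes the string during login and attaches it to the login user.
    UserGroupInformation.setConfiguration(conf);
    Credentials creds = UserGroupInformation.getLoginUser().getCredentials();
    System.out.println("Loaded tokens: " + creds.numberOfTokens());
  }
}

The same base64 string could instead be passed as the system property -Dhadoop.tokens=... or the HADOOP_TOKEN environment variable; the diff below reads all three sources.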

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

@@ -630,6 +630,8 @@ public class CommonConfigurationKeysPublic {
    */
   public static final String HADOOP_TOKEN_FILES =
       "hadoop.token.files";
+  public static final String HADOOP_TOKENS =
+      "hadoop.tokens";
   public static final String HADOOP_HTTP_AUTHENTICATION_TYPE =
     "hadoop.http.authentication.type";
 

+ 53 - 38
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

@@ -23,13 +23,14 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_KEYTAB_LOGIN_AUTORENEWAL_ENABLED;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_KEYTAB_LOGIN_AUTORENEWAL_ENABLED_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKENS;
 import static org.apache.hadoop.security.UGIExceptionMessages.*;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+import static org.apache.hadoop.util.StringUtils.getTrimmedStringCollection;
 
 import com.google.common.annotations.VisibleForTesting;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.security.AccessControlContext;
@@ -42,10 +43,10 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Date;
 import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -88,7 +89,6 @@ import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 import org.slf4j.Logger;
@@ -298,7 +298,9 @@ public class UserGroupInformation {
   
   /**Environment variable pointing to the token cache file*/
   public static final String HADOOP_TOKEN_FILE_LOCATION = 
-    "HADOOP_TOKEN_FILE_LOCATION";
+      "HADOOP_TOKEN_FILE_LOCATION";
+  /** Environment variable pointing to the base64 tokens. */
+  public static final String HADOOP_TOKEN = "HADOOP_TOKEN";
   
   public static boolean isInitialized() {
     return conf != null;
@@ -752,45 +754,58 @@ public class UserGroupInformation {
       }
       loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, realUser);
 
-      String tokenFileLocation = System.getProperty(HADOOP_TOKEN_FILES);
-      if (tokenFileLocation == null) {
-        tokenFileLocation = conf.get(HADOOP_TOKEN_FILES);
-      }
-      if (tokenFileLocation != null) {
-        for (String tokenFileName:
-             StringUtils.getTrimmedStrings(tokenFileLocation)) {
-          if (tokenFileName.length() > 0) {
-            File tokenFile = new File(tokenFileName);
-            if (tokenFile.exists() && tokenFile.isFile()) {
-              Credentials cred = Credentials.readTokenStorageFile(
-                  tokenFile, conf);
-              loginUser.addCredentials(cred);
-            } else {
-              LOG.info("tokenFile("+tokenFileName+") does not exist");
-            }
+      // Load tokens from files
+      final Collection<String> tokenFileLocations = new LinkedHashSet<>();
+      tokenFileLocations.addAll(getTrimmedStringCollection(
+          System.getProperty(HADOOP_TOKEN_FILES)));
+      tokenFileLocations.addAll(getTrimmedStringCollection(
+          conf.get(HADOOP_TOKEN_FILES)));
+      tokenFileLocations.addAll(getTrimmedStringCollection(
+          System.getenv(HADOOP_TOKEN_FILE_LOCATION)));
+      for (String tokenFileLocation : tokenFileLocations) {
+        if (tokenFileLocation != null && tokenFileLocation.length() > 0) {
+          File tokenFile = new File(tokenFileLocation);
+          LOG.debug("Reading credentials from location {}",
+              tokenFile.getCanonicalPath());
+          if (tokenFile.exists() && tokenFile.isFile()) {
+            Credentials cred = Credentials.readTokenStorageFile(
+                tokenFile, conf);
+            LOG.debug("Loaded {} tokens from {}", cred.numberOfTokens(),
+                tokenFile.getCanonicalPath());
+            loginUser.addCredentials(cred);
+          } else {
+            LOG.info("Token file {} does not exist",
+                tokenFile.getCanonicalPath());
           }
         }
       }
 
-      String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
-      if (fileLocation != null) {
-        // Load the token storage file and put all of the tokens into the
-        // user. Don't use the FileSystem API for reading since it has a lock
-        // cycle (HADOOP-9212).
-        File source = new File(fileLocation);
-        LOG.debug("Reading credentials from location set in {}: {}",
-            HADOOP_TOKEN_FILE_LOCATION,
-            source.getCanonicalPath());
-        if (!source.isFile()) {
-          throw new FileNotFoundException("Source file "
-              + source.getCanonicalPath() + " from "
-              + HADOOP_TOKEN_FILE_LOCATION
-              + " not found");
+      // Load tokens from base64 encoding
+      final Collection<String> tokensBase64 = new LinkedHashSet<>();
+      tokensBase64.addAll(getTrimmedStringCollection(
+          System.getProperty(HADOOP_TOKENS)));
+      tokensBase64.addAll(getTrimmedStringCollection(
+          conf.get(HADOOP_TOKENS)));
+      tokensBase64.addAll(getTrimmedStringCollection(
+          System.getenv(HADOOP_TOKEN)));
+      int numTokenBase64 = 0;
+      for (String tokenBase64 : tokensBase64) {
+        if (tokenBase64 != null && tokenBase64.length() > 0) {
+          try {
+            Token<TokenIdentifier> token = new Token<>();
+            token.decodeFromUrlString(tokenBase64);
+            Credentials cred = new Credentials();
+            cred.addToken(token.getService(), token);
+            loginUser.addCredentials(cred);
+            numTokenBase64++;
+          } catch (IOException ioe) {
+            LOG.error("Cannot add token {}: {}",
+                tokenBase64, ioe.getMessage());
+          }
         }
-        Credentials cred = Credentials.readTokenStorageFile(
-            source, conf);
-        LOG.debug("Loaded {} tokens", cred.numberOfTokens());
-        loginUser.addCredentials(cred);
+      }
+      if (numTokenBase64 > 0) {
+        LOG.debug("Loaded {} base64 tokens", numTokenBase64);
       }
     } catch (IOException ioe) {
       LOG.debug("failure to load login credentials", ioe);

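A side note on the loading loop above: the system property, the configuration value, and the environment variable are merged into a LinkedHashSet, and each decoded token is added to Credentials keyed by its service, which is why the test below expects a token supplied through both hadoop.tokens and hadoop.token.files to surface only once. A rough, self-contained sketch of that round trip follows; all values are invented.

import java.util.Collection;
import java.util.LinkedHashSet;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenRoundTripSketch {          // hypothetical class name
  public static void main(String[] args) throws Exception {
    // Producing side: encode a token as URL-safe base64, suitable for
    // -Dhadoop.tokens=..., the hadoop.tokens config key, or HADOOP_TOKEN.
    Token<TokenIdentifier> original = new Token<>(
        "id".getBytes(), "pw".getBytes(),
        new Text("EXAMPLE_KIND"), new Text("example-service"));
    String base64 = original.encodeToUrlString();

    // Consuming side: the same value arriving from two sources collapses in
    // the LinkedHashSet, and Credentials keys tokens by service anyway.
    Collection<String> merged = new LinkedHashSet<>();
    merged.add(base64);   // e.g. from the system property
    merged.add(base64);   // e.g. from the environment variable: ignored

    Credentials creds = new Credentials();
    for (String s : merged) {
      Token<TokenIdentifier> decoded = new Token<>();
      decoded.decodeFromUrlString(s);
      creds.addToken(decoded.getService(), decoded);
    }
    System.out.println("Distinct tokens: " + creds.numberOfTokens()); // 1
  }
}
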
+ 93 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

@@ -1056,6 +1056,7 @@ public class TestUserGroupInformation {
     Collection<Token<?>> credsugiTokens = tokenUgi.getTokens();
     assertTrue(credsugiTokens.contains(token1));
     assertTrue(credsugiTokens.contains(token2));
+    System.clearProperty("hadoop.token.files");
   }
 
   @Test
@@ -1270,4 +1271,96 @@ public class TestUserGroupInformation {
     // isDestroyed should be called at least once
     Mockito.verify(tgt, atLeastOnce()).isDestroyed();
   }
+
+  @Test
+  public void testImportTokensFromConfig() throws IOException {
+    Configuration config = new Configuration();
+
+    // Add a base64 token
+    String service0 = "testTokenImportService0";
+    byte[] identity = "identityImportConfig".getBytes();
+    byte[] password = "passwordImportConfig".getBytes();
+    Token<TokenIdentifier> expectedToken0 = new Token<>(identity, password,
+        new Text("testTokenKind0"), new Text(service0));
+    String tokenBase64 = expectedToken0.encodeToUrlString();
+    config.set(CommonConfigurationKeysPublic.HADOOP_TOKENS,
+        tokenBase64 + ",badtoken");
+
+    // Add a token from a file
+    String service1 = "testTokenImportService1";
+    Credentials cred0 = new Credentials();
+    Token<TokenIdentifier> expectedToken1 = expectedToken0.copyToken();
+    expectedToken1.setKind(new Text("testTokenKind1"));
+    expectedToken1.setService(new Text(service1));
+    cred0.addToken(expectedToken1.getService(), expectedToken1);
+    Path workDir = new Path(
+        GenericTestUtils.getRandomizedTestDir().getAbsolutePath());
+    Path tokenPath1 = new Path(workDir, "dt.token");
+    cred0.writeTokenStorageFile(tokenPath1, config);
+    config.set(CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES,
+        tokenPath1 + "," + new Path(workDir, "badfile"));
+
+    UserGroupInformation.reset();
+    UserGroupInformation.setConfiguration(config);
+
+    // Check if the tokens were loaded
+    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    Credentials outCred = ugi.getCredentials();
+    assertEquals("Tokens: " + outCred.getAllTokens(),
+        2, outCred.getAllTokens().size());
+    boolean found0 = false;
+    boolean found1 = false;
+    for (Token<? extends TokenIdentifier> token : outCred.getAllTokens()) {
+      assertArrayEquals(identity, token.getIdentifier());
+      if (token.getService().toString().equals(service0)) {
+        assertEquals(expectedToken0.encodeToUrlString(),
+            token.encodeToUrlString());
+        found0 = true;
+      }
+      if (token.getService().toString().equals(service1)) {
+        found1 = true;
+      }
+    }
+    assertTrue("Expected token testTokenService0 not found: " + outCred,
+        found0);
+    assertTrue("Expected token testTokenService1 not found: " + outCred,
+        found1);
+
+    // Try to add the same token through configuration and file
+    Credentials cred1 = new Credentials();
+    cred1.addToken(expectedToken0.getService(), expectedToken0);
+    cred1.writeTokenStorageFile(tokenPath1, config);
+
+    UserGroupInformation.reset();
+    UserGroupInformation.setConfiguration(config);
+
+    UserGroupInformation ugi1 = UserGroupInformation.getLoginUser();
+    Credentials outCred1 = ugi1.getCredentials();
+    assertEquals("Tokens: " + outCred1.getAllTokens(),
+        1, outCred1.getAllTokens().size());
+  }
+
+  @Test
+  public void testImportTokensFromProperty() throws IOException {
+    // Add a base64 token
+    Text service = new Text("testTokenProperty");
+    byte[] identity = "identityImportProperty".getBytes();
+    byte[] password = "passwordImportProperty".getBytes();
+    Token<TokenIdentifier> expectedToken0 = new Token<>(identity, password,
+        new Text("testTokenKind0"), service);
+    String tokenBase64 = expectedToken0.encodeToUrlString();
+    System.setProperty(CommonConfigurationKeysPublic.HADOOP_TOKENS,
+        tokenBase64);
+
+    // Check if the tokens were loaded
+    UserGroupInformation.reset();
+    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    Credentials creds = ugi.getCredentials();
+    assertEquals("Tokens: " + creds.getAllTokens(),
+        1, creds.getAllTokens().size());
+    assertArrayEquals(creds.getToken(service).getIdentifier(), identity);
+
+    // Cleanup
+    System.clearProperty(CommonConfigurationKeysPublic.HADOOP_TOKENS);
+  }
 }