
commit 4f2ed207a71bbfa6e2de0c1ce171fdb0bcee52bb
Author: Devaraj Das <ddas@yahoo-inc.com>
Date: Tue Mar 9 23:53:50 2010 -0800

MAPREDUCE:1566 from https://issues.apache.org/jira/secure/attachment/12438376/mr-1566-1.1.patch

+++ b/YAHOO-CHANGES.txt
+ MAPREDUCE:1566. Fixes bugs in the earlier patch. (ddas)
+


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security-patches@1077306 13f79535-47bb-0310-9956-ffa450edef68

Committer: Owen O'Malley
Parent commit: 3c7092739d

+ 27 - 8
src/core/org/apache/hadoop/security/Credentials.java

@@ -19,23 +19,24 @@
 package org.apache.hadoop.security;
 
 import java.io.DataInput;
+import java.io.DataInputStream;
 import java.io.DataOutput;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.conf.Configuration;
 
 /**
  * A class that provides the facilities of reading and writing 
@@ -114,29 +115,47 @@ public class Credentials implements Writable {
   public void readTokenStorageFile(Path filename, 
                                    Configuration conf) throws IOException {
     FSDataInputStream in = filename.getFileSystem(conf).open(filename);
+    try {
+      readTokenStorageStream(in);
+    } catch(IOException ioe) {
+      throw new IOException("Exception reading " + filename, ioe);
+    } finally {
+      in.close();
+    }
+  }
+  
+  /**
+   * Convenience method for reading a token storage file directly from a 
+   * DataInputStream
+   */
+  public void readTokenStorageStream(DataInputStream in) throws IOException {
     byte[] magic = new byte[TOKEN_STORAGE_MAGIC.length];
     in.readFully(magic);
     if (!Arrays.equals(magic, TOKEN_STORAGE_MAGIC)) {
-      throw new IOException("Bad header found in token storage " + filename);
+      throw new IOException("Bad header found in token storage.");
     }
     byte version = in.readByte();
     if (version != TOKEN_STORAGE_VERSION) {
       throw new IOException("Unknown version " + version + 
-                            " in token storage " + filename);
+                            " in token storage.");
     }
     readFields(in);
-    in.close();
   }
   
   private static final byte[] TOKEN_STORAGE_MAGIC = "HDTS".getBytes();
   private static final byte TOKEN_STORAGE_VERSION = 0;
   
-  public void writeTokenStorageFile(Path filename, 
-                                    Configuration conf) throws IOException {
-    FSDataOutputStream os = filename.getFileSystem(conf).create(filename);
+  public void writeTokenStorageToStream(DataOutputStream os)
+    throws IOException {
     os.write(TOKEN_STORAGE_MAGIC);
     os.write(TOKEN_STORAGE_VERSION);
     write(os);
+  }
+
+  public void writeTokenStorageFile(Path filename, 
+                                    Configuration conf) throws IOException {
+    FSDataOutputStream os = filename.getFileSystem(conf).create(filename);
+    writeTokenStorageToStream(os);
     os.close();
   }
 

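The Credentials.java change splits the header handling out of readTokenStorageFile/writeTokenStorageFile into stream-based counterparts. Below is a minimal round-trip sketch using the two new methods, assuming this patched Credentials class is on the classpath; the class name and the in-memory buffer are illustrative, not part of the patch.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.security.Credentials;

    public class CredentialsStreamRoundTrip {
      public static void main(String[] args) throws IOException {
        Credentials creds = new Credentials();
        // Tokens would normally be added here via creds.addToken(alias, token).

        // Serialize: writeTokenStorageToStream prepends the HDTS magic and version byte.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        creds.writeTokenStorageToStream(out);
        out.close();

        // Deserialize: readTokenStorageStream validates the header before calling readFields.
        Credentials copy = new Credentials();
        DataInputStream in = new DataInputStream(
            new ByteArrayInputStream(buffer.toByteArray()));
        copy.readTokenStorageStream(in);
        in.close();

        System.out.println("Tokens read back: " + copy.numberOfTokens());
      }
    }

Because the magic header and version byte now travel with the token data on any DataOutputStream, the bytes no longer need to pass through a file to be validated on read.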
+ 3 - 4
src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java

@@ -126,7 +126,7 @@ public class DelegationTokenFetcher {
     
     Credentials ts = new Credentials();
     ts.addToken(new Text(shortName), token);
-    ts.write(out);
+    ts.writeTokenStorageToStream(out);
   }
 
   /**
@@ -137,7 +137,6 @@ public class DelegationTokenFetcher {
   throws IOException {
     // Enable Kerberos sockets
    System.setProperty("https.cipherSuites", "TLS_KRB5_WITH_3DES_EDE_CBC_SHA");
-   DataOutputStream file = null;
    DataInputStream dis = null;
    
    try {
@@ -162,7 +161,6 @@ public class DelegationTokenFetcher {
      throw new IOException("Unable to obtain remote token", e);
    } finally {
      if(dis != null) dis.close();
-     if(file != null) file.close();
    }
  }
   /**
@@ -173,8 +171,9 @@ public class DelegationTokenFetcher {
   static private void getDTfromRemoteIntoFile(String nnAddr, String filename) 
   throws IOException {
     Credentials ts = getDTfromRemote(nnAddr, null); 
+
     DataOutputStream file = new DataOutputStream(new FileOutputStream(filename));
-    ts.write(file);
+    ts.writeTokenStorageToStream(file);
     file.flush();
     System.out.println("Successfully wrote token of " + file.size() 
         + " bytes  to " + filename);

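With getDTfromRemoteIntoFile now going through writeTokenStorageToStream instead of the raw Writable write, the file on disk carries the same HDTS framing as a token storage file and can be loaded back with readTokenStorageFile. A rough sketch of the consumer side, under that assumption; the helper class and local path handling are illustrative.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.Credentials;

    public class FetchedTokenFileReader {
      // Loads a token file written by the patched DelegationTokenFetcher.
      // readTokenStorageFile rejects the file unless it starts with the
      // HDTS magic and a known version byte.
      static Credentials load(String localFilename, Configuration conf)
          throws IOException {
        Credentials creds = new Credentials();
        creds.readTokenStorageFile(new Path("file:///" + localFilename), conf);
        return creds;
      }
    }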
+ 18 - 0
src/mapred/org/apache/hadoop/mapreduce/security/TokenCache.java

@@ -87,7 +87,10 @@ public class TokenCache {
   throws IOException {
     // get jobtracker principal id (for the renewer)
     Text jtCreds = new Text(conf.get(JobTracker.JT_USER_NAME, ""));
+    boolean notReadFile = true;
     for(Path p: ps) {
+      //TODO: Connecting to the namenode is not required in the case
+      //where we already have the credentials in the file
       FileSystem fs = FileSystem.get(p.toUri(), conf);
       if(fs instanceof DistributedFileSystem) {
         DistributedFileSystem dfs = (DistributedFileSystem)fs;
@@ -101,6 +104,21 @@ public class TokenCache {
           LOG.debug("DT for " + token.getService()  + " is already present");
           continue;
         }
+        if (notReadFile) { //read the file only once
+          String binaryTokenFilename =
+            conf.get("mapreduce.job.credentials.binary");
+          if (binaryTokenFilename != null) {
+            credentials.readTokenStorageFile(new Path("file:///" +  
+                binaryTokenFilename), conf);
+          }
+          notReadFile = false;
+          token = 
+            TokenCache.getDelegationToken(credentials, fs_addr); 
+          if(token != null) {
+            LOG.debug("DT for " + token.getService()  + " is already present");
+            continue;
+          }
+        }
         // get the token
         token = dfs.getDelegationToken(jtCreds);
         if(token==null) 

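The TokenCache change reads a pre-fetched token file named by the mapreduce.job.credentials.binary property once, and reuses any delegation tokens it already contains before falling back to dfs.getDelegationToken. A sketch of how a submitter might point a job at such a file; the JobConf usage and the file path are illustrative and not part of this patch.

    import org.apache.hadoop.mapred.JobConf;

    public class BinaryCredentialsExample {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        // Path to a token file previously written by DelegationTokenFetcher;
        // the actual location is chosen by the submitter.
        job.set("mapreduce.job.credentials.binary", "/tmp/job.token");
        // During submission, the patched TokenCache loads this file once and
        // skips asking the NameNode for delegation tokens it already holds.
      }
    }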
+ 2 - 1
src/test/org/apache/hadoop/tools/TestDelegationTokenFetcher.java

@@ -83,7 +83,8 @@ public class TestDelegationTokenFetcher {
     Credentials ts = new Credentials();
     DataInputStream dis = 
       new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
-    ts.readFields(dis);
+    
+    ts.readTokenStorageStream(dis);
     Token<? extends TokenIdentifier> newToken = ts.getToken(new Text(SHORT_NAME));
     
     assertEquals("Should only be one token in storage", ts.numberOfTokens(), 1);