瀏覽代碼

HADOOP-5980. Fix LinuxTaskController so tasks get passed LD_LIBRARY_PATH and other environment variables. Contributed by Sreekanth Ramakrishnan.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@784334 13f79535-47bb-0310-9956-ffa450edef68
Hemanth Yamijala 16 年之前
父節點
當前提交
6b673b05d0

+ 4 - 0
CHANGES.txt

@@ -813,6 +813,10 @@ Trunk (unreleased changes)
     HADOOP-6031. Remove @author tags from Java source files.  (Ravi Phulari
     via szetszwo)
 
+    HADOOP-5980. Fix LinuxTaskController so tasks get passed 
+    LD_LIBRARY_PATH and other environment variables.
+    (Sreekanth Ramakrishnan via yhemanth)
+
 Release 0.20.1 - Unreleased
 
   INCOMPATIBLE CHANGES

+ 14 - 1
src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java

@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -128,9 +129,21 @@ class LinuxTaskController extends TaskController {
       TaskLog.buildCommandLine(env.setup, env.vargs, env.stdout, env.stderr,
           env.logSize, true);
 
+    StringBuffer sb = new StringBuffer();
+    //Export all environment variables before the child command, because
+    //the setuid/setgid launcher binary strips environment variables
+    //beginning with LD_* from its inherited environment.
+    for(Entry<String, String> entry : env.env.entrySet()) {
+      sb.append("export ");
+      sb.append(entry.getKey());
+      sb.append("=");
+      sb.append(entry.getValue());
+      sb.append("\n");
+    }
+    sb.append(cmdLine);
     // write the command to a file in the
     // task specific cache directory
-    writeCommand(cmdLine, getTaskCacheDirectory(context));
+    writeCommand(sb.toString(), getTaskCacheDirectory(context));
     
     // Call the taskcontroller with the right parameters.
     List<String> launchTaskJVMArgs = buildLaunchTaskArgs(context);

+ 27 - 0
src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java

@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.mapred;
 
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
 /**
@@ -41,4 +44,28 @@ public class TestJobExecutionAsDifferentUser extends
     assertTrue("Job failed", job.isSuccessful());
     assertOwnerShip(outDir);
   }
+  
+  public void testEnvironment() throws IOException {
+    if (!shouldRun()) {
+      return;
+    }
+    startCluster();
+    TestMiniMRChildTask childTask = new TestMiniMRChildTask();
+    Path inDir = new Path("input1");
+    Path outDir = new Path("output1");
+    try {
+      childTask.runTestTaskEnv(getClusterConf(), inDir, outDir);
+    } catch (IOException e) {
+      fail("IOException thrown while running environment test."
+          + e.getMessage());
+    } finally {
+      FileSystem outFs = outDir.getFileSystem(getClusterConf());
+      if (outFs.exists(outDir)) {
+        assertOwnerShip(outDir);
+        outFs.delete(outDir, true);
+      } else {
+        fail("Output directory does not exist: " + outDir.toString());
+      }
+    }
+  }
 }

+ 19 - 18
src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java

@@ -265,28 +265,11 @@ public class TestMiniMRChildTask extends TestCase {
   public void testTaskEnv(){
     try {
       JobConf conf = mr.createJobConf();
-      
       // initialize input, output directories
       Path inDir = new Path("testing/wc/input1");
       Path outDir = new Path("testing/wc/output1");
-      String input = "The input";
-      
-      configure(conf, inDir, outDir, input, EnvCheckMapper.class);
-
       FileSystem outFs = outDir.getFileSystem(conf);
-      
-      // test 
-      //  - new SET of new var (MY_PATH)
-      //  - set of old var (HOME)
-      //  - append to an old var from modified env (LD_LIBRARY_PATH)
-      //  - append to an old var from tt's env (PATH)
-      //  - append to a new var (NEW_PATH)
-      conf.set("mapred.child.env", 
-               "MY_PATH=/tmp,HOME=/tmp,LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp,"
-               + "PATH=$PATH:/tmp,NEW_PATH=$NEW_PATH:/tmp");
-      conf.set("path", System.getenv("PATH"));
-
-      JobClient.runJob(conf);
+      runTestTaskEnv(conf, inDir, outDir);
       outFs.delete(outDir, true);
     } catch(Exception e) {
       e.printStackTrace();
@@ -294,4 +277,22 @@ public class TestMiniMRChildTask extends TestCase {
       tearDown();
     }
   }
+  
+  void runTestTaskEnv(JobConf conf, Path inDir, Path outDir) throws IOException {
+    String input = "The input";
+    configure(conf, inDir, outDir, input, EnvCheckMapper.class);
+    // test 
+    //  - new SET of new var (MY_PATH)
+    //  - set of old var (HOME)
+    //  - append to an old var from modified env (LD_LIBRARY_PATH)
+    //  - append to an old var from tt's env (PATH)
+    //  - append to a new var (NEW_PATH)
+    conf.set("mapred.child.env", 
+             "MY_PATH=/tmp,HOME=/tmp,LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp,"
+             + "PATH=$PATH:/tmp,NEW_PATH=$NEW_PATH:/tmp");
+    conf.set("path", System.getenv("PATH"));
+    RunningJob job = JobClient.runJob(conf);
+    assertTrue("The environment checker job failed.", job.isSuccessful());
+  }
+  
 }