소스 검색

Remove parent's env vars from child processes

(cherry picked from commit b0bcb4e728e91c1c1acc514deb3f2c95626147b2)
Robert Kanter 9 년 전
부모
커밋
c3b7425bf1

+ 27 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

@@ -32,6 +32,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
 
 /** 
  * A base class for running a Unix command.
@@ -279,6 +280,8 @@ abstract public class Shell {
   /** If or not script timed out*/
   private AtomicBoolean timedOut;
 
+  /** Indicates if the parent env vars should be inherited or not. */
+  protected boolean inheritParentEnv = true;
 
   /** Centralized logic to discover and validate the sanity of the Hadoop 
    *  home directory. Returns either NULL or a directory that exists and 
@@ -466,6 +469,20 @@ abstract public class Shell {
     if (environment != null) {
       builder.environment().putAll(this.environment);
     }
+
+    // Remove the credential-store password env var from the Builder to
+    // prevent it leaking from the parent process.
+    if (!inheritParentEnv) {
+      // branch-2: Only do this for HADOOP_CREDSTORE_PASSWORD
+      // Sometimes daemons are configured to use the CredentialProvider feature
+      // and given their jceks password via an environment variable.  We need to
+      // make sure to remove it so it doesn't leak to child processes, which
+      // might be owned by a different user.  For example, the NodeManager
+      // running a User's container.
+      builder.environment().remove(
+          AbstractJavaKeyStoreProvider.CREDENTIAL_PASSWORD_NAME);
+    }
+
     if (dir != null) {
       builder.directory(this.dir);
     }
@@ -683,6 +700,11 @@ abstract public class Shell {
       this(execString, dir, env , 0L);
     }
 
+    public ShellCommandExecutor(String[] execString, File dir,
+                                Map<String, String> env, long timeout) {
+      this(execString, dir, env , timeout, true);
+    }
+
     /**
      * Create a new instance of the ShellCommandExecutor to execute a command.
      * 
@@ -695,10 +717,12 @@ abstract public class Shell {
      *            environment is not modified.
      * @param timeout Specifies the time in milliseconds, after which the
      *                command will be killed and the status marked as timedout.
-     *                If 0, the command will not be timed out. 
+     *                If 0, the command will not be timed out.
+     * @param inheritParentEnv Indicates if the process should inherit the env
+     *                         vars from the parent process or not.
      */
     public ShellCommandExecutor(String[] execString, File dir, 
-        Map<String, String> env, long timeout) {
+        Map<String, String> env, long timeout, boolean inheritParentEnv) {
       command = execString.clone();
       if (dir != null) {
         setWorkingDirectory(dir);
@@ -707,6 +731,7 @@ abstract public class Shell {
         setEnvironment(env);
       }
       timeOutInterval = timeout;
+      this.inheritParentEnv = inheritParentEnv;
     }
         
 

+ 44 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.util;
 
 import junit.framework.TestCase;
+import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -27,8 +28,13 @@ import java.io.PrintWriter;
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.hadoop.fs.FileUtil;
+import org.junit.Assume;
+import org.junit.Test;
 
 public class TestShell extends TestCase {
 
@@ -111,6 +117,44 @@ public class TestShell extends TestCase {
     shellFile.delete();
     assertTrue("Script didnt not timeout" , shexc.isTimedOut());
   }
+
+  @Test
+  public void testEnvVarsWithInheritance() throws Exception {
+    Assume.assumeFalse(Shell.WINDOWS);
+    testEnvHelper(true);
+  }
+
+  @Test
+  public void testEnvVarsWithoutInheritance() throws Exception {
+    Assume.assumeFalse(Shell.WINDOWS);
+    testEnvHelper(false);
+  }
+
+  private void testEnvHelper(boolean inheritParentEnv) throws Exception {
+    Map<String, String> customEnv = Collections.singletonMap(
+        AbstractJavaKeyStoreProvider.CREDENTIAL_PASSWORD_NAME, "foo");
+    Shell.ShellCommandExecutor command = new Shell.ShellCommandExecutor(
+        new String[]{"env"}, null, customEnv, 0L,
+        inheritParentEnv);
+    command.execute();
+    String[] varsArr = command.getOutput().split("\n");
+    Map<String, String> vars = new HashMap<>();
+    for (String var : varsArr) {
+      int eqIndex = var.indexOf('=');
+      vars.put(var.substring(0, eqIndex), var.substring(eqIndex + 1));
+    }
+    Map<String, String> expectedEnv = new HashMap<>();
+    expectedEnv.putAll(System.getenv());
+    if (inheritParentEnv) {
+      expectedEnv.putAll(customEnv);
+    } else {
+      assertFalse("child process environment should not have contained "
+              + AbstractJavaKeyStoreProvider.CREDENTIAL_PASSWORD_NAME,
+          vars.containsKey(
+              AbstractJavaKeyStoreProvider.CREDENTIAL_PASSWORD_NAME));
+    }
+    assertEquals(expectedEnv, vars);
+  }
   
   private static int countTimerThreads() {
     ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();

+ 3 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java

@@ -269,7 +269,9 @@ public class DefaultContainerExecutor extends ContainerExecutor {
       return new ShellCommandExecutor(
           command,
           wordDir,
-          environment); 
+          environment,
+          0L,
+          false);
   }
 
   protected LocalWrapperScriptBuilder getLocalWrapperScriptBuilder(

+ 5 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java

@@ -237,9 +237,11 @@ public class DockerContainerExecutor extends ContainerExecutor {
         LOG.debug("launchContainer: " + commandStr + " " + Joiner.on(" ").join(command));
       }
       shExec = new ShellCommandExecutor(
-          command,
-          new File(containerWorkDir.toUri().getPath()),
-          container.getLaunchContext().getEnvironment());      // sanitized env
+        command,
+        new File(containerWorkDir.toUri().getPath()),
+        container.getLaunchContext().getEnvironment(),      // sanitized env
+        0L,
+        false);
       if (isContainerActive(containerId)) {
         shExec.execute();
       } else {

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java

@@ -224,7 +224,8 @@ public class LinuxContainerExecutor extends ContainerExecutor {
     }
     buildMainArgs(command, user, appId, locId, nmAddr, localDirs);
     String[] commandArray = command.toArray(new String[command.size()]);
-    ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray);
+    ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray,
+		null, null, 0L, true);
     if (LOG.isDebugEnabled()) {
       LOG.debug("initApplication: " + Arrays.toString(commandArray));
     }