
MAPREDUCE-7449: Add add-opens flag to container launch commands on JDK17 nodes (#5935)

Contributed by Benjamin Teke
Benjamin Teke · 1 year ago
Commit 2684221653
9 changed files with 61 additions and 5 deletions
  1. +3 -2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java
  2. +8 -0
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
  3. +9 -0
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
  4. +12 -0
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
  5. +7 -0
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
  6. +7 -0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
  7. +2 -0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
  8. +7 -0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
  9. +6 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
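Not part of the commit: a rough sketch of how an application is expected to use the new marker when it assembles a container launch command, following the same pattern the distributedshell Client adopts in this patch. The class and variable names below are invented for illustration; the NodeManager-side replacement of the marker is the ContainerLaunch change further down.

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.yarn.api.ApplicationConstants;
    import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;

    // Hypothetical helper: builds the java command line for a container launch
    // context. The NodeManager later replaces JVM_ADD_OPENS_VAR ("<ADD_OPENS>")
    // with the real --add-opens flag on JDK17+ nodes, or with an empty string
    // on older JDKs.
    public class LaunchCommandSketch {
      public static String buildCommand(int heapMb, String mainClass) {
        List<String> vargs = new ArrayList<>();
        vargs.add(Environment.JAVA_HOME.$$() + "/bin/java");
        vargs.add("-Xmx" + heapMb + "m");
        vargs.add(ApplicationConstants.JVM_ADD_OPENS_VAR); // expanded at launch time
        vargs.add(mainClass);
        return String.join(" ", vargs);
      }
    }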

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java

@@ -52,6 +52,7 @@ public class TestMapReduceChildJVM {
     MyMRApp app = new MyMRApp(1, 0, true, this.getClass().getName(), true);
     Configuration conf = new Configuration();
     conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT, false);
     Job job = app.submit(conf);
     app.waitForState(job, JobState.SUCCEEDED);
     app.verifyCompleted();
@@ -123,7 +124,7 @@ public class TestMapReduceChildJVM {
         "[" + MRApps.crossPlatformify("JAVA_HOME") + "/bin/java" +
             " -Djava.net.preferIPv4Stack=true" +
             " -Dhadoop.metrics.log.level=WARN " +
-            "  -Xmx820m -Djava.io.tmpdir=" + MRApps.crossPlatformify("PWD") + "/tmp" +
+            "  -Xmx820m <ADD_OPENS> -Djava.io.tmpdir=" + MRApps.crossPlatformify("PWD") + "/tmp" +
             " -Dlog4j.configuration=container-log4j.properties" +
             " -Dyarn.app.container.log.dir=<LOG_DIR>" +
             " -Dyarn.app.container.log.filesize=0" +
@@ -165,7 +166,7 @@ public class TestMapReduceChildJVM {
       "[" + MRApps.crossPlatformify("JAVA_HOME") + "/bin/java" +
       " -Djava.net.preferIPv4Stack=true" +
       " -Dhadoop.metrics.log.level=WARN " +
-      "  -Xmx820m -Djava.io.tmpdir=" + MRApps.crossPlatformify("PWD") + "/tmp" +
+      "  -Xmx820m <ADD_OPENS> -Djava.io.tmpdir=" + MRApps.crossPlatformify("PWD") + "/tmp" +
       " -Dlog4j.configuration=" + testLogPropertieFile +
       " -Dyarn.app.container.log.dir=<LOG_DIR>" +
       " -Dyarn.app.container.log.filesize=0" +

+ 8 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java

@@ -51,6 +51,7 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.util.ClassUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -2207,6 +2208,13 @@ public class JobConf extends Configuration {
       javaOpts += " " + xmxArg;
     }
 
+    // JDK17 support: automatically add --add-opens=java.base/java.lang=ALL-UNNAMED
+    // so the tasks can launch on a JDK17 node.
+    if (getBoolean(MRJobConfig.MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT,
+            MRJobConfig.MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT_DEFAULT)) {
+      javaOpts += " " + ApplicationConstants.JVM_ADD_OPENS_VAR;
+    }
+
     return javaOpts;
   }
 

+ 9 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java

@@ -305,6 +305,15 @@ public interface MRJobConfig {
     "os.name,os.version,java.home,java.runtime.version,java.vendor," +
     "java.version,java.vm.name,java.class.path,java.io.tmpdir,user.dir,user.name";
 
+  /*
+   * Flag to indicate whether JDK17's required add-opens flags should be added to MR AM and
+   * map/reduce containers regardless of the user-specified java opts.
+   */
+  public static final String MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT =
+    "mapreduce.jvm.add-opens-as-default";
+
+  public static final boolean MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT_DEFAULT = true;
+
   public static final String IO_SORT_FACTOR = "mapreduce.task.io.sort.factor";
 
   public static final int DEFAULT_IO_SORT_FACTOR = 10;

+ 12 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml

@@ -1811,6 +1811,18 @@
    <description>Comma-delimited list of system properties to log on mapreduce JVM start</description>
 </property>
 
+<property>
+  <name>mapreduce.jvm.add-opens-as-default</name>
+  <value>true</value>
+  <description>Since JDK17 no longer allows the reflection API to access
+    non-public fields and methods by default, add-opens flags should be added
+    to the MR AM and map/reduce containers regardless of the user-specified
+    java opts. Setting this to true adds the flags to the container launch
+    commands on nodes running JDK17 or higher. Defaults to true; the setting
+    has no effect on nodes running JDK16 or earlier.
+  </description>
+</property>
+
 <!-- jobhistory properties -->
 
 <property>
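A minimal sketch, not part of the patch, of opting a single job out of the new default from driver code; it mirrors what TestMapReduceChildJVM does above, and the job name here is a made-up placeholder. Setting the same property to false in mapred-site.xml disables it cluster-wide.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class DisableAddOpensExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Turn off the automatic add-opens injection for this job only, so no
        // <ADD_OPENS> marker is appended to the AM or task java opts.
        conf.setBoolean(MRJobConfig.MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT, false);
        Job job = Job.getInstance(conf, "disable-add-opens-example");
        // ... mapper, reducer, input/output paths and job.waitForCompletion(true)
        //     would follow as in any other driver ...
      }
    }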

+ 7 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java

@@ -485,6 +485,13 @@ public class YARNRunner implements ClientProtocol {
         MRJobConfig.MR_AM_COMMAND_OPTS, MRJobConfig.MR_AM_ENV);
     vargs.add(mrAppMasterUserOptions);
 
+    // JDK17 support: automatically add --add-opens=java.base/java.lang=ALL-UNNAMED
+    // so the MR AM can launch on a JDK17 node.
+    if (conf.getBoolean(MRJobConfig.MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT,
+            MRJobConfig.MAPREDUCE_JVM_ADD_OPENS_JAVA_OPT_DEFAULT)) {
+      vargs.add(ApplicationConstants.JVM_ADD_OPENS_VAR);
+    }
+
     if (jobConf.getBoolean(MRJobConfig.MR_AM_PROFILE,
         MRJobConfig.DEFAULT_MR_AM_PROFILE)) {
       final String profileParams = jobConf.get(MRJobConfig.MR_AM_PROFILE_PARAMS,

+ 7 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java

@@ -77,6 +77,13 @@ public interface ApplicationConstants {
   String APPLICATION_WEB_PROXY_BASE_ENV =
     "APPLICATION_WEB_PROXY_BASE";
 
+  /**
+   * The environmental variable for JDK17's add-opens workaround. On container
+   * launch this should be replaced either by a correctly formatted add-opens
+   * option if JDK17 is used, or by an empty string otherwise.
+   */
+  String JVM_ADD_OPENS_VAR = "<ADD_OPENS>";
+
   /**
    * The temporary environmental variable for container log directory. This
    * should be replaced by real container log directory on container launch.

+ 2 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java

@@ -953,6 +953,8 @@ public class Client {
     vargs.add("\"" + Environment.JAVA_HOME.$$() + "/bin/java\"");
     // Set Xmx based on am memory size
     vargs.add("-Xmx" + amMemory + "m");
+    // JDK17 support
+    vargs.add(ApplicationConstants.JVM_ADD_OPENS_VAR);
     // Set class name 
     vargs.add(appMasterMainClass);
     // Set params for Application Master

+ 7 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java

@@ -168,6 +168,13 @@ public class ContainerLaunch implements Callable<Integer> {
     var = var.replace(ApplicationConstants.CLASS_PATH_SEPARATOR,
       File.pathSeparator);
 
+    if (Shell.isJavaVersionAtLeast(17)) {
+      var = var.replace(ApplicationConstants.JVM_ADD_OPENS_VAR,
+              "--add-opens=java.base/java.lang=ALL-UNNAMED");
+    } else {
+      var = var.replace(ApplicationConstants.JVM_ADD_OPENS_VAR, "");
+    }
+
     // replace parameter expansion marker. e.g. {{VAR}} on Windows is replaced
     // as %VAR% and on Linux replaced as "$VAR"
     if (Shell.WINDOWS) {
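A hedged illustration, not part of the commit, of the expansion this hunk implements. It calls the same static ContainerLaunch.expandEnvironment helper that TestContainerLaunch exercises below (assumed to take a command string and a log directory Path); the command string and log directory are invented, and the example class sits in the launcher package so its visibility matches the test's.

    package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.api.ApplicationConstants;

    public class AddOpensExpansionExample {
      public static void main(String[] args) {
        String command = "$JAVA_HOME/bin/java -Xmx820m "
            + ApplicationConstants.JVM_ADD_OPENS_VAR
            + " org.example.MyMainClass";
        Path logDir = new Path("/tmp/nm/container/logs"); // made-up log dir
        // On a JDK17+ node the marker expands to
        // --add-opens=java.base/java.lang=ALL-UNNAMED; on older JDKs it is
        // replaced with an empty string.
        System.out.println(ContainerLaunch.expandEnvironment(command, logDir));
      }
    }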

+ 6 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java

@@ -574,18 +574,21 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
             + Apps.crossPlatformify("HADOOP_HOME") + "/share/hadoop/common/lib/*"
             + ApplicationConstants.CLASS_PATH_SEPARATOR
             + Apps.crossPlatformify("HADOOP_LOG_HOME")
-            + ApplicationConstants.LOG_DIR_EXPANSION_VAR;
+            + ApplicationConstants.LOG_DIR_EXPANSION_VAR
+            + " " + ApplicationConstants.JVM_ADD_OPENS_VAR;
 
     String res = ContainerLaunch.expandEnvironment(input, logPath);
 
+    String expectedAddOpens = Shell.isJavaVersionAtLeast(17) ?
+        "--add-opens=java.base/java.lang=ALL-UNNAMED" : "";
     if (Shell.WINDOWS) {
       Assert.assertEquals("%HADOOP_HOME%/share/hadoop/common/*;"
           + "%HADOOP_HOME%/share/hadoop/common/lib/*;"
-          + "%HADOOP_LOG_HOME%/nm/container/logs", res);
+          + "%HADOOP_LOG_HOME%/nm/container/logs" + " " + expectedAddOpens, res);
     } else {
       Assert.assertEquals("$HADOOP_HOME/share/hadoop/common/*:"
           + "$HADOOP_HOME/share/hadoop/common/lib/*:"
-          + "$HADOOP_LOG_HOME/nm/container/logs", res);
+          + "$HADOOP_LOG_HOME/nm/container/logs" + " " + expectedAddOpens, res);
     }
     System.out.println(res);
   }