
YARN-2937. Fixed new findbugs warnings in hadoop-yarn-nodemanager. Contributed by Varun Saxena.

(cherry picked from commit 41a548a916d4248164cb9495320f123ec215d70e)
Zhijie Shen, 10 years ago
parent
commit
f02bd6683a
11 changed files with 61 additions and 47 deletions
  1. 3 0
      hadoop-yarn-project/CHANGES.txt
  2. 10 0
      hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
  3. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
  4. 4 6
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
  5. 8 8
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
  6. 0 3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
  7. 3 2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
  8. 0 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
  9. 24 18
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
  10. 5 7
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
  11. 3 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java

+ 3 - 0
hadoop-yarn-project/CHANGES.txt

@@ -246,6 +246,9 @@ Release 2.7.0 - UNRELEASED
     YARN-2940. Fix new findbugs warnings in rest of the hadoop-yarn components. (Li Lu 
     via junping_du)
 
+    YARN-2937. Fixed new findbugs warnings in hadoop-yarn-nodemanager. (Varun Saxena
+    via zjshen)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

+ 10 - 0
hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml

@@ -407,4 +407,14 @@
     <Class name="org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils$KeyParser" />
     <Bug pattern="EI_EXPOSE_REP2" />
   </Match>
+
+  <!-- Ignore unrelated RV_RETURN_VALUE_IGNORED_BAD_PRACTICE warnings. No need to wait for result from executor service -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncher" />
+    <Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService" />
+    <Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE" />
+  </Match>
 </FindBugsFilter>
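
Note on the two exclusions added above: RV_RETURN_VALUE_IGNORED_BAD_PRACTICE fires when the Future returned by ExecutorService.submit() is dropped, and the XML comment documents that dropping it is intentional here. The sketch below is a hypothetical, standalone illustration of that fire-and-forget pattern (it is not the ContainersLauncher or SharedCacheUploadService code), assuming only the standard java.util.concurrent API:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class FireAndForgetSketch {
      public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        // The Future returned by submit() is deliberately ignored: the caller only
        // cares about the task's side effects, which is exactly what findbugs flags.
        pool.submit(new Runnable() {
          @Override
          public void run() {
            System.out.println("launching container...");
          }
        });
        pool.shutdown();
      }
    }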

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java

@@ -226,7 +226,7 @@ public abstract class ContainerExecutor implements Configurable {
 
     PrintStream pout = null;
     try {
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       sb.write(pout);
     } finally {
       if (out != null) {
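
The one-line change above is the recurring fix in this commit: findbugs' DM_DEFAULT_ENCODING warning is raised when PrintStream(OutputStream) silently uses the platform default charset, so the three-argument constructor that pins UTF-8 is used instead. A minimal standalone sketch of the same pattern (the file name and script contents are illustrative, not taken from the commit):

    import java.io.FileNotFoundException;
    import java.io.FileOutputStream;
    import java.io.PrintStream;
    import java.io.UnsupportedEncodingException;

    public class Utf8PrintStreamSketch {
      public static void main(String[] args)
          throws FileNotFoundException, UnsupportedEncodingException {
        // false = no auto-flush; "UTF-8" fixes the encoding regardless of the platform default.
        PrintStream pout = new PrintStream(
            new FileOutputStream("example-script.sh"), false, "UTF-8");
        try {
          pout.println("#!/bin/bash");
          pout.println("echo hello");
        } finally {
          pout.close();
        }
      }
    }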

+ 4 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java

@@ -32,12 +32,11 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Random;
 import java.util.Map;
 
+import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
@@ -292,7 +291,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
 
       try {
         out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
-        pout = new PrintStream(out);
+        pout = new PrintStream(out, false, "UTF-8");
         writeLocalWrapperScript(launchDst, pidFile, pout);
       } finally {
         IOUtils.cleanup(LOG, pout, out);
@@ -345,7 +344,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
       PrintStream pout = null;
       try {
         out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
-        pout = new PrintStream(out);
+        pout = new PrintStream(out, false, "UTF-8");
         // We need to do a move as writing to a file is not atomic
         // Process reading a file being written to may get garbled data
         // hence write pid to tmp file first followed by a mv
@@ -539,8 +538,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
 
     // make probability to pick a directory proportional to
     // the available space on the directory.
-    Random r = new Random();
-    long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+    long randomPosition = RandomUtils.nextLong() % totalAvailable;
     int dir = 0;
     // skip zero available space directory,
     // because totalAvailable is greater than 0 and randomPosition
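
Why the Random change above matters: Math.abs(Long.MIN_VALUE) is still negative, so the removed Math.abs(r.nextLong()) % totalAvailable could, in a one-in-2^64 case, produce a negative position (findbugs documents the analogous RV_ABSOLUTE_VALUE_OF_RANDOM_INT pattern for nextInt()). The commit switches to commons-lang's RandomUtils.nextLong(), which avoids the abs() step entirely. A small sketch of the corner case using only java.util.Random, with a sign-bit mask shown as one possible alternative (not the approach the commit takes):

    import java.util.Random;

    public class AbsOfRandomPitfall {
      public static void main(String[] args) {
        long totalAvailable = 3L;
        // The corner case: abs() of the most negative long stays negative.
        System.out.println(Math.abs(Long.MIN_VALUE));                  // -9223372036854775808
        System.out.println(Math.abs(Long.MIN_VALUE) % totalAvailable); // -2, an invalid position
        // Masking off the sign bit keeps the result non-negative.
        Random r = new Random();
        long randomPosition = (r.nextLong() & Long.MAX_VALUE) % totalAvailable;
        System.out.println(randomPosition);                            // always in [0, 3)
      }
    }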

+ 8 - 8
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java

@@ -22,6 +22,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+
+import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -59,7 +61,6 @@ import java.util.Random;
 import java.util.Set;
 import java.util.regex.Pattern;
 import java.net.InetSocketAddress;
-
 import static org.apache.hadoop.fs.CreateFlag.CREATE;
 import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
 
@@ -310,9 +311,9 @@ public class DockerContainerExecutor extends ContainerExecutor {
     PrintStream ps = null;
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     try {
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       if (LOG.isDebugEnabled()) {
-        ps = new PrintStream(baos);
+        ps = new PrintStream(baos, false, "UTF-8");
         sb.write(ps);
       }
       sb.write(pout);
@@ -326,7 +327,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
       }
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Script: " + baos.toString());
+      LOG.debug("Script: " + baos.toString("UTF-8"));
     }
   }
 
@@ -440,7 +441,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
 
       try {
         out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
-        pout = new PrintStream(out);
+        pout = new PrintStream(out, false, "UTF-8");
         writeLocalWrapperScript(launchDst, pidFile, pout);
       } finally {
         IOUtils.cleanup(LOG, pout, out);
@@ -498,7 +499,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
       PrintStream pout = null;
       try {
         out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
-        pout = new PrintStream(out);
+        pout = new PrintStream(out, false, "UTF-8");
         // We need to do a move as writing to a file is not atomic
         // Process reading a file being written to may get garbled data
         // hence write pid to tmp file first followed by a mv
@@ -736,8 +737,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
 
     // make probability to pick a directory proportional to
     // the available space on the directory.
-    Random r = new Random();
-    long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+    long randomPosition = RandomUtils.nextLong() % totalAvailable;
     int dir = 0;
     // skip zero available space directory,
     // because totalAvailable is greater than 0 and randomPosition

+ 0 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java

@@ -299,9 +299,6 @@ public class LinuxContainerExecutor extends ContainerExecutor {
         return ExitCode.TERMINATED.getExitCode();
       }
     } catch (ExitCodeException e) {
-      if (null == shExec) {
-        return -1;
-      }
       int exitCode = shExec.getExitCode();
       LOG.warn("Exit code from container " + containerId + " is : " + exitCode);
       // 143 (SIGTERM) and 137 (SIGKILL) exit codes means the container was

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java

@@ -29,6 +29,7 @@ import java.io.OutputStream;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.net.URISyntaxException;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -501,7 +502,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
           try
           {
             BufferedReader lines = new BufferedReader(
-                new InputStreamReader(stream));
+                new InputStreamReader(stream, Charset.forName("UTF-8")));
             char[] buf = new char[512];
             int nRead;
             while ((nRead = lines.read(buf, 0, buf.length)) > 0) {
@@ -718,7 +719,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
        }
        catch(Throwable e) {
          LOG.warn(String.format(
-             "An exception occured during the cleanup of localizer job %s:\n%s", 
+             "An exception occured during the cleanup of localizer job %s:%n%s", 
              localizerPid, 
              org.apache.hadoop.util.StringUtils.stringifyException(e)));
        }
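
Two independent fixes sit in the hunk above: the InputStreamReader now names its charset explicitly, and the String.format string uses %n rather than a literal \n, which the formatter expands to the platform line separator (the point of findbugs' VA_FORMAT_STRING_USES_NEWLINE warning). A throwaway illustration of the %n change, with made-up values:

    public class FormatNewlineSketch {
      public static void main(String[] args) {
        String pid = "1234";
        // "%n" is resolved by the formatter to the platform line separator; "\n" is emitted as-is.
        String msg = String.format(
            "An exception occurred during the cleanup of localizer job %s:%n%s",
            pid, "stringified exception");
        System.out.print(msg);
      }
    }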

+ 0 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java

@@ -37,7 +37,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

+ 24 - 18
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java

@@ -20,9 +20,13 @@ package org.apache.hadoop.yarn.server.nodemanager.util;
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -38,6 +42,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -235,7 +240,6 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
 
   private void updateCgroup(String controller, String groupName, String param,
                             String value) throws IOException {
-    FileWriter f = null;
     String path = pathForCgroup(controller, groupName);
     param = controller + "." + param;
 
@@ -243,19 +247,25 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
       LOG.debug("updateCgroup: " + path + ": " + param + "=" + value);
     }
 
+    PrintWriter pw = null;
     try {
-      f = new FileWriter(path + "/" + param, false);
-      f.write(value);
+      File file = new File(path + "/" + param);
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      pw = new PrintWriter(w);
+      pw.write(value);
     } catch (IOException e) {
       throw new IOException("Unable to set " + param + "=" + value +
           " for cgroup at: " + path, e);
     } finally {
-      if (f != null) {
-        try {
-          f.close();
-        } catch (IOException e) {
-          LOG.warn("Unable to close cgroup file: " +
-              path, e);
+      if (pw != null) {
+        boolean hasError = pw.checkError();
+        pw.close();
+        if(hasError) {
+          throw new IOException("Unable to set " + param + "=" + value +
+                " for cgroup at: " + path);
+        }
+        if(pw.checkError()) {
+          throw new IOException("Error while closing cgroup file " + path);
         }
       }
     }
@@ -376,7 +386,8 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     BufferedReader in = null;
 
     try {
-      in = new BufferedReader(new FileReader(new File(getMtabFileName())));
+      FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
+      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
 
       for (String str = in.readLine(); str != null;
           str = in.readLine()) {
@@ -396,12 +407,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     } catch (IOException e) {
       throw new IOException("Error while reading " + getMtabFileName(), e);
     } finally {
-      // Close the streams
-      try {
-        in.close();
-      } catch (IOException e2) {
-        LOG.warn("Error closing the stream: " + getMtabFileName(), e2);
-      }
+      IOUtils.cleanup(LOG, in);
     }
 
     return ret;
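
The updateCgroup rewrite above works around PrintWriter's habit of swallowing IOExceptions: write() never throws, so the new code consults checkError() before close() and once more afterwards, turning a silent write failure into an IOException. A minimal standalone sketch of that pattern, writing to an ordinary temp file rather than a real cgroup parameter file:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;
    import java.io.Writer;

    public class CheckErrorSketch {
      static void writeValue(String path, String value) throws IOException {
        PrintWriter pw = null;
        try {
          Writer w = new OutputStreamWriter(new FileOutputStream(new File(path)), "UTF-8");
          pw = new PrintWriter(w);
          pw.write(value);
        } finally {
          if (pw != null) {
            boolean hasError = pw.checkError(); // flushes, then reports any buffered write error
            pw.close();
            if (hasError || pw.checkError()) {  // second check catches a failure during close()
              throw new IOException("Unable to write " + value + " to " + path);
            }
          }
        }
      }

      public static void main(String[] args) throws IOException {
        File target = new File(System.getProperty("java.io.tmpdir"), "cgroup-param-example");
        writeValue(target.getPath(), "1024");
      }
    }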

+ 5 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java

@@ -19,8 +19,9 @@ package org.apache.hadoop.yarn.server.nodemanager.util;
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -49,14 +50,14 @@ public class ProcessIdFileReader {
     
     LOG.debug("Accessing pid from pid file " + path);
     String processId = null;
-    FileReader fileReader = null;
     BufferedReader bufReader = null;
 
     try {
       File file = new File(path.toString());
       if (file.exists()) {
-        fileReader = new FileReader(file);
-        bufReader = new BufferedReader(fileReader);
+        FileInputStream fis = new FileInputStream(file);
+        bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+
         while (true) {
           String line = bufReader.readLine();
           if (line == null) {
@@ -91,9 +92,6 @@ public class ProcessIdFileReader {
         }
       }
     } finally {
-      if (fileReader != null) {
-        fileReader.close();
-      }
       if (bufReader != null) {
         bufReader.close();
       }

+ 3 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java

@@ -28,6 +28,7 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -151,7 +152,8 @@ public class ContainerLogsPage extends NMView {
           }
           
           IOUtils.skipFully(logByteStream, start);
-          InputStreamReader reader = new InputStreamReader(logByteStream);
+          InputStreamReader reader =
+              new InputStreamReader(logByteStream, Charset.forName("UTF-8"));
           int bufferSize = 65536;
           char[] cbuf = new char[bufferSize];