MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen. Contributed by Masatake Iwasaki.

Akira Ajisaka, 10 years ago
commit 34fe11c987

+ 3 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -330,6 +330,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6231. Grep example job is not working on a fully-distributed
     cluster. (aajisaka)
 
+    MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen. (Masatake Iwasaki
+    via aajisaka)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

+ 0 - 5
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java

@@ -192,11 +192,6 @@ public class Hadoop20JHParser implements JobHistoryParser {
 
     do {
       addedLine = getOneLine();
-
-      if (addedLine == null) {
-        return sb.toString();
-      }
-
       sb.append("\n");
       sb.append(addedLine);
     } while (addedLine.length() < endLineString.length()

+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java

@@ -559,7 +559,7 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
     input =
         maybeUncompressedPath(new Path(inputDirectoryPath, currentFileName));
 
-    return input != null;
+    return true;
   }
 
   private String readInputLine() throws IOException {
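
These hunks, like the value != null and finishTime != null removals further down, drop checks against values that cannot be null on that path, the pattern findbugs reports as a redundant null check (likely RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE). A minimal, hypothetical sketch of the warning and the warning-free form, not taken from the Rumen sources:

    public class RedundantNullCheckSketch {
      // Contract: never returns null; it either produces a value or throws.
      private static String nextToken() {
        return "token";
      }

      static boolean advance(StringBuilder sb) {
        String token = nextToken();
        sb.append(token);
        // Ending the method with "return token != null;" is what findbugs
        // flags: nextToken() cannot return null, so the test is dead code.
        return true;
      }

      public static void main(String[] args) {
        System.out.println(advance(new StringBuilder()));
      }
    }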

+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java

@@ -67,7 +67,7 @@ public class MapAttempt20LineHistoryEventEmitter extends
         MapAttempt20LineHistoryEventEmitter that =
             (MapAttempt20LineHistoryEventEmitter) thatg;
 
-        if (finishTime != null && "success".equalsIgnoreCase(status)) {
+        if ("success".equalsIgnoreCase(status)) {
           return new MapAttemptFinishedEvent
             (taskAttemptID,
               that.originalTaskType, status,

+ 7 - 4
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java

@@ -25,6 +25,8 @@ import java.io.InputStream;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import java.nio.charset.Charset;
+
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.ParserConfigurationException;
@@ -44,6 +46,7 @@ class ParsedConfigFile {
       Pattern.compile("_(job_[0-9]+_[0-9]+)_");
   private static final Pattern heapPattern =
       Pattern.compile("-Xmx([0-9]+)([mMgG])");
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
 
   final int heapMegabytes;
 
@@ -100,7 +103,7 @@ class ParsedConfigFile {
     }
 
     try {
-      InputStream is = new ByteArrayInputStream(xmlString.getBytes());
+      InputStream is = new ByteArrayInputStream(xmlString.getBytes(UTF_8));
 
       DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
 
@@ -151,7 +154,7 @@ class ParsedConfigFile {
         
         properties.setProperty(attr, value);
 
-        if ("mapred.child.java.opts".equals(attr) && value != null) {
+        if ("mapred.child.java.opts".equals(attr)) {
           Matcher matcher = heapPattern.matcher(value);
           if (matcher.find()) {
             String heapSize = matcher.group(1);
@@ -164,11 +167,11 @@ class ParsedConfigFile {
           }
         }
 
-        if (MRJobConfig.QUEUE_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.QUEUE_NAME.equals(attr)) {
           queue = value;
         }
 
-        if (MRJobConfig.JOB_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.JOB_NAME.equals(attr)) {
           jobName = value;
         }
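
The getBytes(UTF_8) change above targets findbugs' reliance-on-default-encoding warning (DM_DEFAULT_ENCODING): String.getBytes() with no argument uses the platform default charset, so the same job-conf XML could decode differently from one JVM to another. A minimal sketch of the idiom using only JDK classes; the class and method names are illustrative, not Rumen's:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.Charset;

    public class ExplicitCharsetSketch {
      // Same idiom as the patch: name the charset once and reuse it.
      private static final Charset UTF_8 = Charset.forName("UTF-8");

      // xml.getBytes() would depend on the JVM default charset (and trip
      // DM_DEFAULT_ENCODING); passing UTF_8 makes the bytes deterministic.
      static InputStream toStream(String xml) {
        return new ByteArrayInputStream(xml.getBytes(UTF_8));
      }

      public static void main(String[] args) throws Exception {
        System.out.println(toStream("<configuration/>").available() + " bytes");
      }
    }

On Java 7 and later the constant could come from java.nio.charset.StandardCharsets instead, but Charset.forName("UTF-8") matches what the patch itself uses.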
 

+ 3 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.nio.charset.Charset;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
@@ -42,6 +43,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class RandomSeedGenerator {
   private static Log LOG = LogFactory.getLog(RandomSeedGenerator.class);
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
   
   /** MD5 algorithm instance, one for each thread. */
   private static final ThreadLocal<MessageDigest> md5Holder =
@@ -72,7 +74,7 @@ public class RandomSeedGenerator {
     // We could have fed the bytes of masterSeed one by one to md5.update()
     // instead
     String str = streamId + '/' + masterSeed;
-    byte[] digest = md5.digest(str.getBytes());
+    byte[] digest = md5.digest(str.getBytes(UTF_8));
     // Create a long from the first 8 bytes of the digest
     // This is fine as MD5 has the avalanche property.
     // Paranoids could have XOR folded the other 8 bytes in too. 
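
The comments above describe the seed scheme: hash streamId + '/' + masterSeed with MD5 and build a long from the first eight digest bytes. A self-contained sketch of that idea follows; the class name and the exact byte-packing are illustrative, not necessarily Rumen's own arithmetic:

    import java.nio.charset.Charset;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class SeedDerivationSketch {
      private static final Charset UTF_8 = Charset.forName("UTF-8");

      // Hash the stream id and master seed, then pack the first 8 of the
      // 16 MD5 digest bytes into a long; the avalanche property keeps
      // nearby inputs from producing nearby seeds.
      static long deriveSeed(String streamId, long masterSeed)
          throws NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] digest = md5.digest((streamId + '/' + masterSeed).getBytes(UTF_8));
        long seed = 0L;
        for (int i = 0; i < 8; i++) {
          seed = (seed << 8) | (digest[i] & 0xFFL);
        }
        return seed;
      }

      public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(deriveSeed("job-trace-stream", 42L));
      }
    }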

+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java

@@ -66,7 +66,7 @@ public class ReduceAttempt20LineHistoryEventEmitter
         String shuffleFinish = line.get("SHUFFLE_FINISHED");
         String sortFinish = line.get("SORT_FINISHED");
 
-        if (finishTime != null && shuffleFinish != null && sortFinish != null
+        if (shuffleFinish != null && sortFinish != null
             && "success".equalsIgnoreCase(status)) {
           ReduceAttempt20LineHistoryEventEmitter that =
               (ReduceAttempt20LineHistoryEventEmitter) thatg;