
MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core. Contributed by Varun Saxena

Junping Du committed 10 years ago
parent
commit
49204fcf18
10 changed files with 24 additions and 31 deletions
  1. +3 -0   hadoop-mapreduce-project/CHANGES.txt
  2. +1 -1   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
  3. +1 -1   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLogAppender.java
  4. +0 -5   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
  5. +6 -9   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
  6. +1 -1   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
  7. +0 -5   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
  8. +10 -5  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
  9. +1 -3   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java
  10. +1 -1  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java

+ 3 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -294,6 +294,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6256. Removed unused private methods in o.a.h.mapreduce.Job.java.
     (Naganarasimha G R via ozawa)
 
+    MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core. 
+    (Varun Saxena via junping_du)
+
   OPTIMIZATIONS
 
     MAPREDUCE-6169. MergeQueue should release reference to the current item 

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java

@@ -145,7 +145,7 @@ class IndexCache {
    */
   public void removeMap(String mapId) {
     IndexInformation info = cache.get(mapId);
-    if (info == null || ((info != null) && isUnderConstruction(info))) {
+    if (info == null || isUnderConstruction(info)) {
       return;
     }
     info = cache.remove(mapId);
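
Note: the dropped clause is the redundant null check (RCN) pattern findbugs flags. Because || short-circuits, the right-hand operand only runs when info == null is false, so the inner (info != null) test was always true and can be removed with no change in behavior.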

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLogAppender.java

@@ -75,7 +75,7 @@ public class TaskLogAppender extends FileAppender implements Flushable {
 
     if (maxEvents == null) {
       String propValue = System.getProperty(LOGSIZE_PROPERTY, "0");
-      setTotalLogFileSize(Long.valueOf(propValue));
+      setTotalLogFileSize(Long.parseLong(propValue));
     }
   }
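
Note: Long.valueOf(String) boxes the parsed value into a Long that the long-typed setter immediately auto-unboxes, while Long.parseLong parses straight to the primitive, which is what findbugs suggests here. A minimal illustration of the difference (hypothetical values, not Hadoop code):

    String propValue = "4096";
    long viaParse   = Long.parseLong(propValue);  // parses directly to a primitive
    long viaValueOf = Long.valueOf(propValue);    // allocates a Long, then auto-unboxes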
   

+ 0 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred.lib;
 import java.io.*;
 import java.lang.reflect.*;
 
-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -49,9 +47,7 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
   protected CombineFileSplit split;
   protected JobConf jc;
   protected Reporter reporter;
-  protected Class<RecordReader<K, V>> rrClass;
   protected Constructor<RecordReader<K, V>> rrConstructor;
-  protected FileSystem fs;
   
   protected int idx;
   protected long progress;
@@ -106,7 +102,6 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
     throws IOException {
     this.split = split;
     this.jc = job;
-    this.rrClass = rrClass;
     this.reporter = reporter;
     this.idx = 0;
     this.curReader = null;
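
Note: neither rrClass nor fs was ever read (the URF_UNREAD_FIELD findbugs pattern), so the fields, the assignment, and the now-unused FileSystem import can all be deleted without changing behavior.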

+ 6 - 9
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java

@@ -390,10 +390,12 @@ class JobSubmitter {
     short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
     copyAndConfigureFiles(job, jobSubmitDir, replication);
 
-    // Set the working directory
-    if (job.getWorkingDirectory() == null) {
-      job.setWorkingDirectory(jtFs.getWorkingDirectory());
-    }
+    // Get the working directory. If not set, sets it to filesystem working dir
+    // This code has been added so that working directory reset before running
+    // the job. This is necessary for backward compatibility as other systems
+    // might use the public API JobConf#setWorkingDirectory to reset the working
+    // directory.
+    job.getWorkingDirectory();
 
   }
   /**
@@ -773,11 +775,6 @@ class JobSubmitter {
     if (!log4jPropertyFile.isEmpty()) {
       short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
       copyLog4jPropertyFile(job, jobSubmitDir, replication);
-
-      // Set the working directory
-      if (job.getWorkingDirectory() == null) {
-        job.setWorkingDirectory(jtFs.getWorkingDirectory());
-      }
     }
   }
 }
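
Note: the deleted null checks were dead code, because JobConf#getWorkingDirectory never returns null: when the property is unset, it resolves the default filesystem's working directory and stores it back into the configuration, which is exactly the reset the new comment relies on. A simplified sketch of that lazy initialization (exception handling elided; see org.apache.hadoop.mapred.JobConf for the real code):

    public Path getWorkingDirectory() {
      String name = get("mapreduce.job.working.dir");
      if (name != null) {
        return new Path(name);
      }
      // Unset: fall back to the filesystem's working directory and remember it.
      Path dir = FileSystem.get(this).getWorkingDirectory();
      set("mapreduce.job.working.dir", dir.toString());
      return dir;
    }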

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java

@@ -90,7 +90,7 @@ public class FieldSelectionHelper {
       }
       pos = fieldSpec.indexOf('-');
       if (pos < 0) {
-        Integer fn = new Integer(fieldSpec);
+        Integer fn = Integer.valueOf(fieldSpec);
         fieldList.add(fn);
       } else {
         String start = fieldSpec.substring(0, pos);
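
Note: new Integer(String) always allocates a fresh object, whereas Integer.valueOf consults the JDK's small-value cache; findbugs flags the constructor as DM_NUMBER_CTOR. A hypothetical illustration (not Hadoop code):

    Integer a = Integer.valueOf("100");
    Integer b = Integer.valueOf("100");
    // a == b is true here: values in [-128, 127] come from a shared cache.
    Integer c = new Integer("100");  // always a new allocation (deprecated since Java 9)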

+ 0 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.lib.input;
 import java.io.*;
 import java.lang.reflect.*;
 
-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -46,9 +44,7 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
                                           Integer.class};
 
   protected CombineFileSplit split;
-  protected Class<? extends RecordReader<K,V>> rrClass;
   protected Constructor<? extends RecordReader<K,V>> rrConstructor;
-  protected FileSystem fs;
   protected TaskAttemptContext context;
   
   protected int idx;
@@ -111,7 +107,6 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
     throws IOException {
     this.split = split;
     this.context = context;
-    this.rrClass = rrClass;
     this.idx = 0;
     this.curReader = null;
     this.progress = 0;
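
Note: this mirrors the change to the mapred variant of CombineFileRecordReader above; rrClass and fs were never read, so the fields and the FileSystem import go.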

+ 10 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.mapreduce.security;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.net.URL;
-
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
 
@@ -141,10 +141,15 @@ public class SecureShuffleUtils {
    */
   public static String toHex(byte[] ba) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(baos);
-    for (byte b : ba) {
-      ps.printf("%x", b);
+    String strHex = "";
+    try {
+      PrintStream ps = new PrintStream(baos, false, "UTF-8");
+      for (byte b : ba) {
+        ps.printf("%x", b);
+      }
+      strHex = baos.toString("UTF-8");
+    } catch (UnsupportedEncodingException e) {
     }
-    return baos.toString();
+    return strHex;
   }
 }
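
Note: the original toHex used the platform default charset for both the PrintStream and baos.toString() (findbugs DM_DEFAULT_ENCODING); the patch pins UTF-8, and the empty catch is safe because every JVM is required to support UTF-8. Since hex digits are pure ASCII, a charset-free alternative would be a StringBuilder, e.g. (a hypothetical sketch, not the committed fix):

    public static String toHex(byte[] ba) {
      StringBuilder sb = new StringBuilder(ba.length * 2);
      for (byte b : ba) {
        sb.append(String.format("%x", b));  // same output as the PrintStream loop
      }
      return sb.toString();
    }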

+ 1 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java

@@ -79,10 +79,8 @@ public class InMemoryReader<K, V> extends Reader<K, V> {
     File dumpFile = new File("../output/" + taskAttemptId + ".dump");
     System.err.println("Dumping corrupt map-output of " + taskAttemptId + 
                        " to " + dumpFile.getAbsolutePath());
-    try {
-      FileOutputStream fos = new FileOutputStream(dumpFile);
+    try (FileOutputStream fos = new FileOutputStream(dumpFile)) {
       fos.write(buffer, 0, bufferSize);
-      fos.close();
     } catch (IOException ioe) {
       System.err.println("Failed to dump map-output of " + taskAttemptId);
     }
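
Note: the try-with-resources form closes the stream even when write() throws, fixing the descriptor leak on the exception path (findbugs OS_OPEN_STREAM). It expands to roughly the following (simplified; the real expansion also suppresses a secondary exception from close()):

    FileOutputStream fos = new FileOutputStream(dumpFile);
    try {
      fos.write(buffer, 0, bufferSize);
    } finally {
      fos.close();  // runs even if write() throws
    }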

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java

@@ -59,7 +59,7 @@ public class ResourceBundles {
     catch (Exception e) {
       return defaultValue;
     }
-    return value == null ? defaultValue : value;
+    return value;
   }
 
   private static String getLookupKey(String key, String suffix) {
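
Note: the lookup that produces value never returns null; a missing key raises MissingResourceException, which the catch above already turns into defaultValue. The ternary's null branch was therefore unreachable, which is the redundant comparison findbugs reported.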