فهرست منبع

MAPREDUCE-5256. CombineInputFormat isn't thread safe affecting HiveServer. Contributed by Vinod Kumar Vavilapalli.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.2@1500340 13f79535-47bb-0310-9956-ffa450edef68
Matthew Foley 12 سال پیش
والد
کامیت
338a7b4522
2 فایل تغییر یافته به همراه 12 افزوده شده و 6 حذف شده
  1. 3 0
      CHANGES.txt
  2. 9 6
      src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java

+ 3 - 0
CHANGES.txt

@@ -13,6 +13,9 @@ Release 1.2.1 - 2013.07.06
 
   BUG FIXES
 
+    MAPREDUCE-5256. CombineInputFormat isn't thread safe affecting HiveServer.
+    (Vinod Kumar Vavilapalli, via mattf)
+
     HADOOP-9504 MetricsDynamicMBeanBase has concurrency issues in createMBeanInfo. 
     (Liang Xie and Jason Lowe, via mattf)
 

+ 9 - 6
src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java

@@ -75,7 +75,7 @@ public abstract class CombineFileInputFormat<K, V>
   private ArrayList<MultiPathFilter> pools = new  ArrayList<MultiPathFilter>();
 
   // mapping from a rack name to the set of Nodes in the rack 
-  private static HashMap<String, Set<String>> rackToNodes = 
+  private HashMap<String, Set<String>> rackToNodes = 
                             new HashMap<String, Set<String>>();
   /**
    * Specify the maximum size (in bytes) of each split. Each split is
@@ -255,7 +255,8 @@ public abstract class CombineFileInputFormat<K, V>
     long totLength = 0;
     for (int i = 0; i < paths.length; i++) {
       files[i] = new OneFileInfo(paths[i], job, 
-                                 rackToBlocks, blockToNodes, nodeToBlocks);
+                                 rackToBlocks, blockToNodes, nodeToBlocks,
+                                 rackToNodes);
       totLength += files[i].getLength();
     }
 
@@ -453,7 +454,8 @@ public abstract class CombineFileInputFormat<K, V>
     OneFileInfo(Path path, JobConf job,
                 HashMap<String, List<OneBlockInfo>> rackToBlocks,
                 HashMap<OneBlockInfo, String[]> blockToNodes,
-                HashMap<String, List<OneBlockInfo>> nodeToBlocks)
+                HashMap<String, List<OneBlockInfo>> nodeToBlocks,
+                HashMap<String, Set<String>> rackToNodes)
                 throws IOException {
       this.fileSize = 0;
 
@@ -490,7 +492,7 @@ public abstract class CombineFileInputFormat<K, V>
             }
             blklist.add(oneblock);
             // Add this host to rackToNodes map
-            addHostToRack(oneblock.racks[j], oneblock.hosts[j]);
+            addHostToRack(rackToNodes, oneblock.racks[j], oneblock.hosts[j]);
          }
 
           // add this block to the node --> block map
@@ -554,7 +556,8 @@ public abstract class CombineFileInputFormat<K, V>
     }
   }
 
-  private static void addHostToRack(String rack, String host) {
+  private static void addHostToRack(HashMap<String, Set<String>> rackToNodes,
+                                   String rack, String host) {
     Set<String> hosts = rackToNodes.get(rack);
     if (hosts == null) {
       hosts = new HashSet<String>();
@@ -563,7 +566,7 @@ public abstract class CombineFileInputFormat<K, V>
     hosts.add(host);
   }
   
-  private static List<String> getHosts(List<String> racks) {
+  private List<String> getHosts(List<String> racks) {
     List<String> hosts = new ArrayList<String>();
     for (String rack : racks) {
       hosts.addAll(rackToNodes.get(rack));