
Reverting MAPREDUCE-5028 (commit 1457918)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1458433 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 12 years ago
commit c19633da5b

+ 0 - 3
hadoop-mapreduce-project/CHANGES.txt

@@ -238,9 +238,6 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-4716. TestHsWebServicesJobsQuery.testJobsQueryStateInvalid 
     fails with jdk7. (tgraves via tucu)
 
-    MAPREDUCE-5028. Maps fail when io.sort.mb is set to high value. 
-    (kkambatl via tucu)
-
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES
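The entry being removed above concerns the map-side sort buffer sized by io.sort.mb. As background only, a short sketch of how that setting becomes a byte buffer inside MapTask, assuming the Hadoop 2.x property name mapreduce.task.io.sort.mb (io.sort.mb is its deprecated alias) and the usual megabyte-to-byte shift; this is illustrative, not code from this patch:

```java
// Illustrative only: how a large io.sort.mb setting turns into a byte count
// close to Integer.MAX_VALUE, the situation MAPREDUCE-5028 targeted.
import org.apache.hadoop.conf.Configuration;

public class SortBufferSizeSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setInt("mapreduce.task.io.sort.mb", 2000); // "io.sort.mb" is the deprecated alias

    int sortmb = conf.getInt("mapreduce.task.io.sort.mb", 100);
    int maxMemUsage = sortmb << 20;                 // megabytes to bytes
    System.out.println("sort buffer would be ~" + maxMemUsage + " bytes");
    // Near the 2047 MB ceiling this approaches Integer.MAX_VALUE, which is
    // where the int arithmetic in MapTask's equator/reset code can overflow.
  }
}
```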

+ 5 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java

@@ -1165,9 +1165,8 @@ public class MapTask extends Task {
       equator = pos;
       // set index prior to first entry, aligned at meta boundary
       final int aligned = pos - (pos % METASIZE);
-      // Cast one of the operands to long to ensure large values don't cause int
-      // overflow
-      kvindex = (int) (((long) aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
+      kvindex =
+        ((aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
       if (LOG.isInfoEnabled()) {
         LOG.info("(EQUATOR) " + pos + " kvi " + kvindex +
             "(" + (kvindex * 4) + ")");
@@ -1184,9 +1183,8 @@ public class MapTask extends Task {
       bufstart = bufend = e;
       final int aligned = e - (e % METASIZE);
       // set start/end to point to first meta record
-      // Cast one of the operands to long to ensure large values don't cause int
-      // overflow
-      kvstart = kvend = (int) (((long) aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
+      kvstart = kvend =
+        ((aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
       if (LOG.isInfoEnabled()) {
         LOG.info("(RESET) equator " + e + " kv " + kvstart + "(" +
           (kvstart * 4) + ")" + " kvi " + kvindex + "(" + (kvindex * 4) + ")");
@@ -1750,7 +1748,7 @@ public class MapTask extends Task {
           this.start = 0;
         }
 
-        super.reset(this.buffer, this.start, this.length - this.start);
+        super.reset(this.buffer, this.start, this.length);
       }
     }
 
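For context on what is being undone here: the removed comments state that the long cast guards against int overflow when kvbuffer is very large. A standalone sketch of that failure mode, with METASIZE (16) and a near-2 GB buffer length chosen to mimic MapTask's MapOutputBuffer; the values and class are illustrative, not code from this patch:

```java
// Standalone sketch of the overflow the reverted cast guarded against.
public class KvIndexOverflowSketch {
  private static final int METASIZE = 16;      // bytes per metadata record

  public static void main(String[] args) {
    int bufferLength = Integer.MAX_VALUE - 100; // ~2 GB kvbuffer
    int pos = bufferLength - 1;                 // equator near the end of the buffer
    int aligned = pos - (pos % METASIZE);

    // Reverted (pure int) form: the intermediate sum wraps past Integer.MAX_VALUE.
    int intKvindex = ((aligned - METASIZE + bufferLength) % bufferLength) / 4;

    // MAPREDUCE-5028 form: widen one operand to long before summing.
    int longKvindex =
        (int) (((long) aligned - METASIZE + bufferLength) % bufferLength) / 4;

    System.out.println("int  arithmetic: " + intKvindex);  // negative (overflowed)
    System.out.println("long arithmetic: " + longKvindex); // valid meta index
  }
}
```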

+ 1 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/ReduceContextImpl.java

@@ -205,8 +205,7 @@ public class ReduceContextImpl<KEYIN,VALUEIN,KEYOUT,VALUEOUT>
           if (backupStore.hasNext()) {
             backupStore.next();
             DataInputBuffer next = backupStore.nextValue();
-            buffer.reset(next.getData(), next.getPosition(), next.getLength()
-                - next.getPosition());
+            buffer.reset(next.getData(), next.getPosition(), next.getLength());
             value = valueDeserializer.deserialize(value);
             return value;
           } else {

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java

@@ -49,14 +49,14 @@ public class InMemoryReader<K, V> extends Reader<K, V> {
 
     buffer = data;
     bufferSize = (int)fileLength;
-    memDataIn.reset(buffer, start, length - start);
+    memDataIn.reset(buffer, start, length);
     this.start = start;
     this.length = length;
   }
 
   @Override
   public void reset(int offset) {
-    memDataIn.reset(buffer, start + offset, length - start - offset);
+    memDataIn.reset(buffer, start + offset, length);
     bytesRead = offset;
     eof = false;
   }
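The remaining hunks (together with the super.reset change in MapTask) revert the same adjustment to the length argument passed to DataInputBuffer.reset. A minimal sketch of the window arithmetic involved, assuming the stock org.apache.hadoop.io.DataInputBuffer behavior where the third argument counts bytes from start and getLength() reports the end offset (start + length); this is not code from this patch:

```java
// Illustrative only: the "length" vs "length - start" distinction behind the
// reverted reset(...) calls.
import org.apache.hadoop.io.DataInputBuffer;

public class ResetLengthSketch {
  public static void main(String[] args) throws Exception {
    byte[] data = "0123456789".getBytes("UTF-8");

    DataInputBuffer in = new DataInputBuffer();
    in.reset(data, 4, 6);                  // readable window: bytes 4..9
    System.out.println(in.getPosition());  // 4 (start of the window)
    System.out.println(in.getLength());    // 10, i.e. start + length, not 6

    // Handing the same window to a second buffer:
    DataInputBuffer copy = new DataInputBuffer();
    // The reverted calls pass getLength() directly as the byte count...
    copy.reset(in.getData(), in.getPosition(), in.getLength());
    // ...while the MAPREDUCE-5028 form subtracted the position first:
    // copy.reset(in.getData(), in.getPosition(), in.getLength() - in.getPosition());
  }
}
```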