Переглянути джерело

HADOOP-5571. Remove widening primitive conversion in TupleWritable mask
manipulation. Contributed by Jingkei Ly


git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.20@759031 13f79535-47bb-0310-9956-ffa450edef68

Christopher Douglas 16 років тому
батько
коміт
a5e6f59af2

+ 3 - 0
CHANGES.txt

@@ -811,6 +811,9 @@ Release 0.20.0 - Unreleased
     HADOOP-5459. Fix undetected CRC errors where intermediate output is closed
     before it has been completely consumed. (cdouglas)
 
+    HADOOP-5571. Remove widening primitive conversion in TupleWritable mask
+    manipulation. (Jingkei Ly via cdouglas)
+
 Release 0.19.2 - Unreleased
 
   BUG FIXES

+ 3 - 3
src/mapred/org/apache/hadoop/mapred/join/TupleWritable.java

@@ -63,7 +63,7 @@ public class TupleWritable implements Writable, Iterable<Writable> {
    * Return true if tuple has an element at the position provided.
    */
   public boolean has(int i) {
-    return 0 != ((1 << i) & written);
+    return 0 != ((1L << i) & written);
   }
 
   /**
@@ -205,7 +205,7 @@ public class TupleWritable implements Writable, Iterable<Writable> {
    * Record that the tuple contains an element at the position provided.
    */
   void setWritten(int i) {
-    written |= 1 << i;
+    written |= 1L << i;
   }
 
   /**
@@ -213,7 +213,7 @@ public class TupleWritable implements Writable, Iterable<Writable> {
    * provided.
    */
   void clearWritten(int i) {
-    written &= -1 ^ (1 << i);
+    written &= -1 ^ (1L << i);
   }
 
   /**

+ 41 - 0
src/test/org/apache/hadoop/mapred/join/TestTupleWritable.java

@@ -22,6 +22,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 
+import java.util.Arrays;
 import java.util.Random;
 
 import junit.framework.TestCase;
@@ -31,6 +32,7 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
 public class TestTupleWritable extends TestCase {
@@ -130,4 +132,43 @@ public class TestTupleWritable extends TestCase {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }
 
+  public void testWideTuple() throws Exception {
+    Text emptyText = new Text("Should be empty");
+    Writable[] values = new Writable[64];
+    Arrays.fill(values,emptyText);
+    values[42] = new Text("Number 42");
+                                     
+    TupleWritable tuple = new TupleWritable(values);
+    tuple.setWritten(42);
+    
+    for (int pos=0; pos<tuple.size();pos++) {
+      boolean has = tuple.has(pos);
+      if (pos == 42) {
+        assertTrue(has);
+      }
+      else {
+        assertFalse("Tuple position is incorrectly labelled as set: " + pos, has);
+      }
+    }
+  }
+  
+  public void testWideTuple2() throws Exception {
+    Text emptyText = new Text("Should be empty");
+    Writable[] values = new Writable[64];
+    Arrays.fill(values,emptyText);
+    values[9] = new Text("Number 9");
+                                     
+    TupleWritable tuple = new TupleWritable(values);
+    tuple.setWritten(9);
+    
+    for (int pos=0; pos<tuple.size();pos++) {
+      boolean has = tuple.has(pos);
+      if (pos == 9) {
+        assertTrue(has);
+      }
+      else {
+        assertFalse("Tuple position is incorrectly labelled as set: " + pos, has);
+      }
+    }
+  }
 }