
svn merge -c 1162008 from trunk for HADOOP-7547.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1162011 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze authored 13 years ago · commit ee8678f7d7
21 changed files with 127 additions and 70 deletions
  1. +3 -0   hadoop-common-project/hadoop-common/CHANGES.txt
  2. +3 -0   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
  3. +8 -3   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
  4. +8 -3   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
  5. +4 -0   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
  6. +8 -4   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
  7. +8 -4   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
  8. +13 -5  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
  9. +9 -4   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
  10. +8 -9  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
  11. +6 -6  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
  12. +5 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
  13. +2 -0  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
  14. +8 -4  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
  15. +8 -4  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java
  16. +7 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java
  17. +2 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
  18. +1 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
  19. +12 -7 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java
  20. +0 -4  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java
  21. +4 -4  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -342,6 +342,9 @@ Release 0.23.0 - Unreleased
 
     HADOOP-7561. Make test-patch only run tests for changed modules. (tomwhite)
 
+    HADOOP-7547. Add generic type in WritableComparable subclasses.
+    (Uma Maheswara Rao G via szetszwo)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
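
Every WritableComparable implementation touched by this patch gets the same mechanical treatment: the raw `WritableComparable` is parameterized with the class itself, the cast-based `compareTo(Object)` becomes a type-safe `compareTo(T)`, and `@Override` is added to the overridden methods. A minimal sketch of the resulting pattern, using a hypothetical `PageCountWritable` that is not part of this patch:

```java
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

// Hypothetical example (not part of this patch) illustrating the pattern the
// commit applies: a generic type parameter plus a type-safe compareTo, so no
// cast from Object is needed and @Override can be used throughout.
public class PageCountWritable implements WritableComparable<PageCountWritable> {
  private int value;

  public PageCountWritable() {}
  public PageCountWritable(int value) { this.value = value; }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(value);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readInt();
  }

  @Override
  public int compareTo(PageCountWritable o) {  // was compareTo(Object o) before generification
    return (value < o.value ? -1 : (value == o.value ? 0 : 1));
  }

  @Override
  public boolean equals(Object o) {
    return (o instanceof PageCountWritable) && ((PageCountWritable) o).value == value;
  }

  @Override
  public int hashCode() { return value; }
}
```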

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java

@@ -43,6 +43,7 @@ public abstract class BinaryComparable implements Comparable<BinaryComparable> {
    * Compare bytes from {#getBytes()}.
    * @see org.apache.hadoop.io.WritableComparator#compareBytes(byte[],int,int,byte[],int,int)
    */
+  @Override
   public int compareTo(BinaryComparable other) {
     if (this == other)
       return 0;
@@ -61,6 +62,7 @@ public abstract class BinaryComparable implements Comparable<BinaryComparable> {
   /**
    * Return true if bytes from {#getBytes()} match.
    */
+  @Override
   public boolean equals(Object other) {
     if (!(other instanceof BinaryComparable))
       return false;
@@ -74,6 +76,7 @@ public abstract class BinaryComparable implements Comparable<BinaryComparable> {
    * Return a hash of the bytes returned from {#getBytes()}.
    * @see org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int)
    */
+  @Override
   public int hashCode() {
     return WritableComparator.hashBytes(getBytes(), getLength());
   }

+ 8 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java

@@ -28,7 +28,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class BooleanWritable implements WritableComparable {
+public class BooleanWritable implements WritableComparable<BooleanWritable> {
   private boolean value;
 
   /** 
@@ -69,6 +69,7 @@ public class BooleanWritable implements WritableComparable {
 
   /**
    */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof BooleanWritable)) {
       return false;
@@ -77,6 +78,7 @@ public class BooleanWritable implements WritableComparable {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return value ? 0 : 1;
   }
@@ -85,12 +87,14 @@ public class BooleanWritable implements WritableComparable {
 
   /**
    */
-  public int compareTo(Object o) {
+  @Override
+  public int compareTo(BooleanWritable o) {
     boolean a = this.value;
-    boolean b = ((BooleanWritable) o).value;
+    boolean b = o.value;
     return ((a == b) ? 0 : (a == false) ? -1 : 1);
   }
   
+  @Override
   public String toString() {
     return Boolean.toString(get());
   }
@@ -103,6 +107,7 @@ public class BooleanWritable implements WritableComparable {
       super(BooleanWritable.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       return compareBytes(b1, s1, l1, b2, s2, l2);
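
Besides the typed `compareTo`, each class keeps a nested raw-byte `Comparator` so records can be ordered in serialized form without deserialization; this hunk only adds `@Override` to it. A hedged sketch of how such a comparator is typically looked up and used, assuming the usual static registration that is outside this hunk:

```java
import java.io.IOException;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableComparator;

// Sketch (assumes hadoop-common 0.23 on the classpath): compare two
// BooleanWritables in serialized form via the registered raw comparator,
// without deserializing either record.
public class RawCompareDemo {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer b1 = new DataOutputBuffer();
    new BooleanWritable(false).write(b1);
    DataOutputBuffer b2 = new DataOutputBuffer();
    new BooleanWritable(true).write(b2);

    WritableComparator cmp = WritableComparator.get(BooleanWritable.class);
    int result = cmp.compare(b1.getData(), 0, b1.getLength(),
                             b2.getData(), 0, b2.getLength());
    System.out.println(result);  // negative: false sorts before true
  }
}
```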

+ 8 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java

@@ -26,7 +26,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 /** A WritableComparable for a single byte. */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class ByteWritable implements WritableComparable {
+public class ByteWritable implements WritableComparable<ByteWritable> {
   private byte value;
 
   public ByteWritable() {}
@@ -48,6 +48,7 @@ public class ByteWritable implements WritableComparable {
   }
 
   /** Returns true iff <code>o</code> is a ByteWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof ByteWritable)) {
       return false;
@@ -56,17 +57,20 @@ public class ByteWritable implements WritableComparable {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return (int)value;
   }
 
   /** Compares two ByteWritables. */
-  public int compareTo(Object o) {
+  @Override
+  public int compareTo(ByteWritable o) {
     int thisValue = this.value;
-    int thatValue = ((ByteWritable)o).value;
+    int thatValue = o.value;
     return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
   }
 
+  @Override
   public String toString() {
     return Byte.toString(value);
   }
@@ -77,6 +81,7 @@ public class ByteWritable implements WritableComparable {
       super(ByteWritable.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       byte thisValue = b1[s1];

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java

@@ -183,6 +183,7 @@ public class BytesWritable extends BinaryComparable
     out.write(bytes, 0, size);
   }
   
+  @Override
   public int hashCode() {
     return super.hashCode();
   }
@@ -190,6 +191,7 @@ public class BytesWritable extends BinaryComparable
   /**
    * Are the two byte sequences equal?
    */
+  @Override
   public boolean equals(Object right_obj) {
     if (right_obj instanceof BytesWritable)
       return super.equals(right_obj);
@@ -199,6 +201,7 @@ public class BytesWritable extends BinaryComparable
   /**
    * Generate the stream of bytes as hex pairs separated by ' '.
    */
+  @Override
   public String toString() { 
     StringBuilder sb = new StringBuilder(3*size);
     for (int idx = 0; idx < size; idx++) {
@@ -225,6 +228,7 @@ public class BytesWritable extends BinaryComparable
     /**
      * Compare the buffers in serialized form.
      */
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       return compareBytes(b1, s1+LENGTH_BYTES, l1-LENGTH_BYTES, 

+ 8 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java

@@ -30,7 +30,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class DoubleWritable implements WritableComparable {
+public class DoubleWritable implements WritableComparable<DoubleWritable> {
 
   private double value = 0.0;
   
@@ -57,6 +57,7 @@ public class DoubleWritable implements WritableComparable {
   /**
    * Returns true iff <code>o</code> is a DoubleWritable with the same value.
    */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof DoubleWritable)) {
       return false;
@@ -65,15 +66,17 @@ public class DoubleWritable implements WritableComparable {
     return this.value == other.value;
   }
   
+  @Override
   public int hashCode() {
     return (int)Double.doubleToLongBits(value);
   }
   
-  public int compareTo(Object o) {
-    DoubleWritable other = (DoubleWritable)o;
-    return (value < other.value ? -1 : (value == other.value ? 0 : 1));
+  @Override
+  public int compareTo(DoubleWritable o) {
+    return (value < o.value ? -1 : (value == o.value ? 0 : 1));
   }
   
+  @Override
   public String toString() {
     return Double.toString(value);
   }
@@ -84,6 +87,7 @@ public class DoubleWritable implements WritableComparable {
       super(DoubleWritable.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       double thisValue = readDouble(b1, s1);

+ 8 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java

@@ -26,7 +26,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 /** A WritableComparable for floats. */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class FloatWritable implements WritableComparable {
+public class FloatWritable implements WritableComparable<FloatWritable> {
   private float value;
 
   public FloatWritable() {}
@@ -48,6 +48,7 @@ public class FloatWritable implements WritableComparable {
   }
 
   /** Returns true iff <code>o</code> is a FloatWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof FloatWritable))
       return false;
@@ -55,17 +56,20 @@ public class FloatWritable implements WritableComparable {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return Float.floatToIntBits(value);
   }
 
   /** Compares two FloatWritables. */
-  public int compareTo(Object o) {
+  @Override
+  public int compareTo(FloatWritable o) {
     float thisValue = this.value;
-    float thatValue = ((FloatWritable)o).value;
+    float thatValue = o.value;
     return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
   }
 
+  @Override
   public String toString() {
     return Float.toString(value);
   }
@@ -75,7 +79,7 @@ public class FloatWritable implements WritableComparable {
     public Comparator() {
       super(FloatWritable.class);
     }
-
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       float thisValue = readFloat(b1, s1);

+ 13 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java

@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -26,7 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 /** A WritableComparable for ints. */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class IntWritable implements WritableComparable {
+public class IntWritable implements WritableComparable<IntWritable> {
   private int value;
 
   public IntWritable() {}
@@ -48,6 +51,7 @@ public class IntWritable implements WritableComparable {
   }
 
   /** Returns true iff <code>o</code> is a IntWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof IntWritable))
       return false;
@@ -55,17 +59,20 @@ public class IntWritable implements WritableComparable {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return value;
   }
 
   /** Compares two IntWritables. */
-  public int compareTo(Object o) {
+  @Override
+  public int compareTo(IntWritable o) {
     int thisValue = this.value;
-    int thatValue = ((IntWritable)o).value;
+    int thatValue = o.value;
     return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
   }
 
+  @Override
   public String toString() {
     return Integer.toString(value);
   }
@@ -75,7 +82,8 @@ public class IntWritable implements WritableComparable {
     public Comparator() {
       super(IntWritable.class);
     }
-
+    
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       int thisValue = readInt(b1, s1);
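
A side note on the comparison logic preserved throughout these classes: `compare`/`compareTo` uses a three-way ternary rather than returning `thisValue - thatValue`, because subtraction can overflow `int` and report the wrong sign. A quick illustration, not from the patch:

```java
// Illustration only: why the ternary form is used instead of subtraction.
public class OverflowDemo {
  public static void main(String[] args) {
    int a = Integer.MIN_VALUE;  // clearly less than 1
    int b = 1;
    System.out.println(a - b);                         // 2147483647: overflow, wrongly "greater"
    System.out.println(a < b ? -1 : (a == b ? 0 : 1)); // -1: correct three-way result
  }
}
```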

+ 9 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java

@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -26,7 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 /** A WritableComparable for longs. */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class LongWritable implements WritableComparable {
+public class LongWritable implements WritableComparable<LongWritable> {
   private long value;
 
   public LongWritable() {}
@@ -60,9 +63,9 @@ public class LongWritable implements WritableComparable {
   }
 
   /** Compares two LongWritables. */
-  public int compareTo(Object o) {
+  public int compareTo(LongWritable o) {
     long thisValue = this.value;
-    long thatValue = ((LongWritable)o).value;
+    long thatValue = o.value;
     return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
   }
 
@@ -86,6 +89,8 @@ public class LongWritable implements WritableComparable {
 
   /** A decreasing Comparator optimized for LongWritable. */ 
   public static class DecreasingComparator extends Comparator {
+    
+    @Override
     public int compare(WritableComparable a, WritableComparable b) {
       return -super.compare(a, b);
     }
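
`DecreasingComparator` simply negates the result of the base `LongWritable` comparator, so larger keys sort first. A hedged usage sketch; the class and its inherited constructor come from hadoop-common, everything else here is illustrative:

```java
import org.apache.hadoop.io.LongWritable;

// Sketch: LongWritable.DecreasingComparator inverts the natural order, so a
// larger value compares as "first" (assumes hadoop-common 0.23 on the classpath).
public class DecreasingDemo {
  public static void main(String[] args) {
    LongWritable a = new LongWritable(3L);
    LongWritable b = new LongWritable(7L);

    int natural = a.compareTo(b);                                         // negative: 3 < 7
    int reversed = new LongWritable.DecreasingComparator().compare(a, b); // positive: 7 sorts first
    System.out.println(natural + " " + reversed);
  }
}
```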

+ 8 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java

@@ -18,24 +18,23 @@
 
 package org.apache.hadoop.io;
 
+import java.io.EOFException;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.io.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Options;
-import org.apache.hadoop.fs.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.io.SequenceFile.Reader;
-import org.apache.hadoop.io.SequenceFile.Writer;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.util.Options;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /** A file-based map from keys to values.
  * 

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java

@@ -26,7 +26,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 /** Singleton Writable with no data. */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class NullWritable implements WritableComparable {
+public class NullWritable implements WritableComparable<NullWritable> {
 
   private static final NullWritable THIS = new NullWritable();
 
@@ -39,12 +39,11 @@ public class NullWritable implements WritableComparable {
     return "(null)";
   }
 
+  @Override
   public int hashCode() { return 0; }
-  public int compareTo(Object other) {
-    if (!(other instanceof NullWritable)) {
-      throw new ClassCastException("can't compare " + other.getClass().getName() 
-                                   + " to NullWritable");
-    }
+  
+  @Override
+  public int compareTo(NullWritable other) {
     return 0;
   }
   public boolean equals(Object other) { return other instanceof NullWritable; }
@@ -60,6 +59,7 @@ public class NullWritable implements WritableComparable {
     /**
      * Compare the buffers in serialized form.
      */
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       assert 0 == l1;

+ 5 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java

@@ -18,12 +18,14 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
 
-import org.apache.hadoop.fs.*;
+import java.io.IOException;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 /** A file-based set of keys. */
 @InterfaceAudience.Public

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java

@@ -265,6 +265,7 @@ public class Text extends BinaryComparable
    * Convert text back to string
    * @see java.lang.Object#toString()
    */
+  @Override
   public String toString() {
     try {
       return decode(bytes, 0, length);
@@ -305,6 +306,7 @@ public class Text extends BinaryComparable
     return false;
   }
 
+  @Override
   public int hashCode() {
     return super.hashCode();
   }

+ 8 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java

@@ -36,7 +36,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Stable
-public class UTF8 implements WritableComparable {
+public class UTF8 implements WritableComparable<UTF8> {
   private static final Log LOG= LogFactory.getLog(UTF8.class);
   private static final DataInputBuffer IBUF = new DataInputBuffer();
 
@@ -129,13 +129,14 @@ public class UTF8 implements WritableComparable {
   }
 
   /** Compare two UTF8s. */
-  public int compareTo(Object o) {
-    UTF8 that = (UTF8)o;
+  @Override
+  public int compareTo(UTF8 o) {
     return WritableComparator.compareBytes(bytes, 0, length,
-                                           that.bytes, 0, that.length);
+                                           o.bytes, 0, o.length);
   }
 
   /** Convert to a String. */
+  @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder(length);
     try {
@@ -150,6 +151,7 @@ public class UTF8 implements WritableComparable {
   }
 
   /** Returns true iff <code>o</code> is a UTF8 with the same contents.  */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof UTF8))
       return false;
@@ -161,6 +163,7 @@ public class UTF8 implements WritableComparable {
                                              that.bytes, 0, that.length) == 0;
   }
 
+  @Override
   public int hashCode() {
     return WritableComparator.hashBytes(bytes, length);
   }
@@ -171,6 +174,7 @@ public class UTF8 implements WritableComparable {
       super(UTF8.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       int n1 = readUnsignedShort(b1, s1);

+ 8 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java

@@ -30,7 +30,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class VIntWritable implements WritableComparable {
+public class VIntWritable implements WritableComparable<VIntWritable> {
   private int value;
 
   public VIntWritable() {}
@@ -52,6 +52,7 @@ public class VIntWritable implements WritableComparable {
   }
 
   /** Returns true iff <code>o</code> is a VIntWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof VIntWritable))
       return false;
@@ -59,17 +60,20 @@ public class VIntWritable implements WritableComparable {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return value;
   }
 
   /** Compares two VIntWritables. */
-  public int compareTo(Object o) {
+  @Override
+  public int compareTo(VIntWritable o) {
     int thisValue = this.value;
-    int thatValue = ((VIntWritable)o).value;
+    int thatValue = o.value;
     return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
   }
-
+  
+  @Override
   public String toString() {
     return Integer.toString(value);
   }

+ 7 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java

@@ -30,7 +30,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class VLongWritable implements WritableComparable {
+public class VLongWritable implements WritableComparable<VLongWritable> {
   private long value;
 
   public VLongWritable() {}
@@ -52,6 +52,7 @@ public class VLongWritable implements WritableComparable {
   }
 
   /** Returns true iff <code>o</code> is a VLongWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof VLongWritable))
       return false;
@@ -59,17 +60,20 @@ public class VLongWritable implements WritableComparable {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return (int)value;
   }
 
   /** Compares two VLongWritables. */
-  public int compareTo(Object o) {
+  @Override
+  public int compareTo(VLongWritable o) {
     long thisValue = this.value;
-    long thatValue = ((VLongWritable)o).value;
+    long thatValue = o.value;
     return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
   }
 
+  @Override
   public String toString() {
     return Long.toString(value);
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java

@@ -37,7 +37,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  *  
  * <p>Example:</p>
  * <p><blockquote><pre>
- *     public class MyWritableComparable implements WritableComparable {
+ *     public class MyWritableComparable implements WritableComparable<MyWritableComparable> {
  *       // Some data
  *       private int counter;
  *       private long timestamp;
@@ -54,7 +54,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  *       
  *       public int compareTo(MyWritableComparable o) {
  *         int thisValue = this.value;
- *         int thatValue = ((IntWritable)o).value;
+ *         int thatValue = o.value;
  *         return (thisValue &lt; thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
  *       }
  *
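
The Javadoc example above still compares a `value` field that the snippet never declares. A compilable, hedged version of that example, comparing on `counter` instead (an illustrative choice, not something this patch prescribes):

```java
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

// Compilable version of the WritableComparable Javadoc example; comparing on
// `counter` is an assumption made here for illustration, not part of the patch.
public class MyWritableComparable implements WritableComparable<MyWritableComparable> {
  private int counter;
  private long timestamp;

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(counter);
    out.writeLong(timestamp);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    counter = in.readInt();
    timestamp = in.readLong();
  }

  @Override
  public int compareTo(MyWritableComparable o) {
    int thisValue = this.counter;
    int thatValue = o.counter;
    return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
  }

  @Override
  public int hashCode() {
    return counter;
  }

  @Override
  public boolean equals(Object o) {
    return (o instanceof MyWritableComparable)
        && ((MyWritableComparable) o).counter == counter;
  }
}
```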

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java

@@ -167,7 +167,7 @@ public class Key implements WritableComparable<Key> {
   }
   
   // Comparable
-  
+  @Override
   public int compareTo(Key other) {
     int result = this.bytes.length - other.getBytes().length;
     for (int i = 0; result == 0 && i < bytes.length; i++) {

+ 12 - 7
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java

@@ -18,10 +18,13 @@
 
 package org.apache.hadoop.io;
 
-import java.util.*;
-import java.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Random;
 
-public class RandomDatum implements WritableComparable {
+
+public class RandomDatum implements WritableComparable<RandomDatum> {
   private int length;
   private byte[] data;
 
@@ -49,20 +52,22 @@ public class RandomDatum implements WritableComparable {
     in.readFully(data, 0, length);
   }
 
-  public int compareTo(Object o) {
-    RandomDatum that = (RandomDatum)o;
+  @Override
+  public int compareTo(RandomDatum o) {
     return WritableComparator.compareBytes(this.data, 0, this.length,
-                                           that.data, 0, that.length);
+                                           o.data, 0, o.length);
   }
 
+  @Override
   public boolean equals(Object o) {
-    return compareTo(o) == 0;
+    return compareTo((RandomDatum)o) == 0;
   }
 
   private static final char[] HEX_DIGITS =
   {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
 
   /** Returns a string representation of this object. */
+  @Override
   public String toString() {
     StringBuilder buf = new StringBuilder(length*2);
     for (int i = 0; i < length; i++) {

+ 0 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java

@@ -21,14 +21,10 @@ package org.apache.hadoop.io;
 import junit.framework.TestCase;
 
 import java.nio.charset.MalformedInputException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.*;
 import java.util.Arrays;
 
 /** Unit tests for NonUTF8. */
 public class TestTextNonUTF8 extends TestCase {
-  private static final Log LOG= LogFactory.getLog(TestTextNonUTF8.class);
 
   public void testNonUTF8() throws Exception{
     // this is a non UTF8 byte array

+ 4 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java

@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.util.Random;
 
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.ReflectionUtils;
 
 import junit.framework.TestCase;
 
@@ -97,7 +97,7 @@ public class TestWritableName extends TestCase {
   public void testBadName() throws Exception {
     Configuration conf = new Configuration();
     try {
-      Class<?> test = WritableName.getClass("unknown_junk",conf);
+      WritableName.getClass("unknown_junk",conf);
       assertTrue(false);
     } catch(IOException e) {
       assertTrue(e.getMessage().matches(".*unknown_junk.*"));