HADOOP-8815. RandomDatum needs to override hashCode(). (Brandon Li via suresh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1389661 13f79535-47bb-0310-9956-ffa450edef68
(cherry picked from commit 3ede27f4557c9e90430a7a3f385b8be243e89688)

Conflicts:
	hadoop-common-project/hadoop-common/CHANGES.txt
@@ -321,6 +321,9 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10989. Work around buggy getgrouplist() implementations on Linux that
     return 0 on failure. (cnauroth)
 
+    HADOOP-8815. RandomDatum needs to override hashCode().
+    (Brandon Li via suresh)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -21,6 +21,7 @@ package org.apache.hadoop.io;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Random;
@@ -65,6 +66,11 @@ public class RandomDatum implements WritableComparable<RandomDatum> {
     return compareTo((RandomDatum)o) == 0;
   }
 
+  @Override
+  public int hashCode() {
+    return Arrays.hashCode(this.data);
+  }
+
   private static final char[] HEX_DIGITS =
     {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
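A note on the choice of Arrays.hashCode (not part of the patch): RandomDatum's payload is a byte array, and Arrays.hashCode(byte[]) hashes the array's contents rather than its identity, which keeps hashCode() consistent with the content-based equals() above (itself delegating to compareTo()). A standalone sketch of the distinction:

```java
import java.util.Arrays;

public class ArrayHashDemo {
  public static void main(String[] args) {
    byte[] a = {1, 2, 3};
    byte[] b = {1, 2, 3};
    // Arrays inherit identity-based Object.hashCode(): distinct array
    // objects with identical contents almost always hash differently.
    System.out.println(a.hashCode() == b.hashCode());             // false (almost always)
    // Arrays.hashCode(byte[]) hashes the contents instead, so equal
    // contents always yield equal hash codes.
    System.out.println(Arrays.hashCode(a) == Arrays.hashCode(b)); // true
  }
}
```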
@@ -34,6 +34,8 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
@@ -226,6 +228,15 @@ public class TestCodec {
       v2.readFields(inflateIn);
       assertTrue("original and compressed-then-decompressed-output not equal",
                  k1.equals(k2) && v1.equals(v2));
+
+      // original and compressed-then-decompressed-output have the same hashCode
+      Map<RandomDatum, String> m = new HashMap<RandomDatum, String>();
+      m.put(k1, k1.toString());
+      m.put(v1, v1.toString());
+      String result = m.get(k2);
+      assertEquals("k1 and k2 hashcode not equal", k1.toString(), result);
+      result = m.get(v2);
+      assertEquals("v1 and v2 hashcode not equal", v1.toString(), result);
 
       // De-compress data byte-at-a-time
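Design note (not part of the patch): the test verifies the contract indirectly through HashMap semantics, putting the original datum in as a key and looking it up with its decompressed copy, which exercises hashCode() and equals() together the way real callers do. A direct form of the same invariant (sketch, using the JUnit asserts already available in TestCodec) would be:

```java
// Direct form of the same invariant (sketch; JUnit-style asserts):
assertEquals("k1 and k2 hashcode not equal", k1.hashCode(), k2.hashCode());
assertEquals("v1 and v2 hashcode not equal", v1.hashCode(), v2.hashCode());
```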