
HADOOP-735. Switch generated record code to use BytesWritable to represent fields of type 'buffer'. Contributed by Milind.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@497624 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 18 years ago
commit ec7abfae9d
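
The summary in practice: record fields declared as 'buffer' were previously generated as java.io.ByteArrayOutputStream; after this commit they are generated as org.apache.hadoop.io.BytesWritable, which wraps a byte[] plus a valid-length and carries value-based equality, hashing, and comparison. A minimal sketch of the two representations (not part of this commit; the variable names are hypothetical):

    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.io.BytesWritable;

    public class BufferFieldSketch {
        public static void main(String[] args) {
            byte[] data = { 1, 2, 3 };

            // Old representation: a mutable stream with identity-based equals/hashCode.
            ByteArrayOutputStream oldVal = new ByteArrayOutputStream(20);
            oldVal.write(data, 0, data.length);

            // New representation: a length-delimited byte[] wrapper with
            // value-based equals/hashCode and lexicographic comparison.
            BytesWritable newVal = new BytesWritable(data);
            System.out.println(newVal.getSize());                       // 3 valid bytes
            System.out.println(newVal.equals(new BytesWritable(data))); // true
        }
    }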

+ 3 - 0
CHANGES.txt

@@ -44,6 +44,9 @@ Trunk (unreleased changes)
 13. HADOOP-902.  Fix a NullPointerException in HDFS client when
     closing output streams.  (Raghu Angadi via cutting)
 
+14. HADOOP-735.  Switch generated record code to use BytesWritable to
+    represent fields of type 'buffer'. (Milind Bhandarkar via cutting)
+
 
 Release 0.10.1 - 2007-01-10
 

+ 6 - 8
src/java/org/apache/hadoop/record/BinaryInputArchive.java

@@ -20,9 +20,9 @@ package org.apache.hadoop.record;
 
 import java.io.DataInput;
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.InputStream;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 import org.apache.hadoop.io.WritableUtils;
@@ -87,13 +87,11 @@ public class BinaryInputArchive implements InputArchive {
         return text;
     }
     
-    public ByteArrayOutputStream readBuffer(String tag) throws IOException {
-        int len = readInt(tag);
-        ByteArrayOutputStream buf = new ByteArrayOutputStream(len);
-        byte[] arr = new byte[len];
-        in.readFully(arr);
-        buf.write(arr, 0, len);
-        return buf;
+    public BytesWritable readBuffer(String tag) throws IOException {
+      int len = WritableUtils.readVInt(in);
+      byte[] barr = new byte[len];
+      in.readFully(barr);
+      return new BytesWritable(barr);
     }
     
     public void readRecord(Record r, String tag) throws IOException {

+ 6 - 5
src/java/org/apache/hadoop/record/BinaryOutputArchive.java

@@ -19,12 +19,12 @@
 package org.apache.hadoop.record;
 
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.util.TreeMap;
 import java.util.ArrayList;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.OutputStream;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 import org.apache.hadoop.io.WritableUtils;
@@ -74,11 +74,12 @@ public class BinaryOutputArchive implements OutputArchive {
         s.write(out);
     }
     
-    public void writeBuffer(ByteArrayOutputStream buf, String tag)
+    public void writeBuffer(BytesWritable buf, String tag)
     throws IOException {
-        byte[] barr = buf.toByteArray();
-        writeInt(barr.length, tag);
-        out.write(barr);
+      byte[] barr = buf.get();
+      int len = buf.getSize();
+      WritableUtils.writeVInt(out, len);
+      out.write(barr, 0, len);
     }
     
     public void writeRecord(Record r, String tag) throws IOException {
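
The binary framing for buffers is now explicit in these two archives: writeBuffer emits a WritableUtils vint holding getSize() and then exactly that many bytes from the backing array (the old path allocated a fresh copy via toByteArray() and routed the length through the generic writeInt), and readBuffer mirrors it with readVInt plus readFully. A self-contained round-trip sketch of that framing (assumption: plain java.io streams stand in for the archive's DataOutput/DataInput):

    import java.io.*;
    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.WritableUtils;

    public class BufferFramingSketch {
        public static void main(String[] args) throws IOException {
            BytesWritable buf = new BytesWritable(new byte[] { 0x0a, 0x0b, 0x0c });

            // Write: vint length prefix, then only the valid bytes.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bos);
            WritableUtils.writeVInt(out, buf.getSize());
            out.write(buf.get(), 0, buf.getSize());

            // Read: vint length, then readFully into a fresh array.
            DataInputStream in =
                new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
            int len = WritableUtils.readVInt(in);
            byte[] barr = new byte[len];
            in.readFully(barr);

            System.out.println(new BytesWritable(barr).equals(buf)); // true
        }
    }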

+ 2 - 2
src/java/org/apache/hadoop/record/CsvInputArchive.java

@@ -21,9 +21,9 @@ package org.apache.hadoop.record;
 import java.io.InputStreamReader;
 import java.io.InputStream;
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.io.PushbackReader;
 import java.io.UnsupportedEncodingException;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -127,7 +127,7 @@ class CsvInputArchive implements InputArchive {
         
     }
     
-    public ByteArrayOutputStream readBuffer(String tag) throws IOException {
+    public BytesWritable readBuffer(String tag) throws IOException {
         String sval = readField(tag);
         return Utils.fromCSVBuffer(sval);
     }

+ 2 - 2
src/java/org/apache/hadoop/record/CsvOutputArchive.java

@@ -19,12 +19,12 @@
 package org.apache.hadoop.record;
 
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.util.TreeMap;
 import java.util.ArrayList;
 import java.io.PrintStream;
 import java.io.OutputStream;
 import java.io.UnsupportedEncodingException;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -97,7 +97,7 @@ public class CsvOutputArchive implements OutputArchive {
         throwExceptionOnError(tag);
     }
     
-    public void writeBuffer(ByteArrayOutputStream buf, String tag)
+    public void writeBuffer(BytesWritable buf, String tag)
     throws IOException {
         printCommaUnlessFirst();
         stream.print(Utils.toCSVBuffer(buf));

+ 2 - 2
src/java/org/apache/hadoop/record/InputArchive.java

@@ -19,7 +19,7 @@
 package org.apache.hadoop.record;
 
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
 /**
@@ -35,7 +35,7 @@ public interface InputArchive {
     public float readFloat(String tag) throws IOException;
     public double readDouble(String tag) throws IOException;
     public Text readString(String tag) throws IOException;
-    public ByteArrayOutputStream readBuffer(String tag) throws IOException;
+    public BytesWritable readBuffer(String tag) throws IOException;
     public void readRecord(Record r, String tag) throws IOException;
     public void startRecord(String tag) throws IOException;
     public void endRecord(String tag) throws IOException;

+ 2 - 2
src/java/org/apache/hadoop/record/OutputArchive.java

@@ -19,9 +19,9 @@
 package org.apache.hadoop.record;
 
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.util.TreeMap;
 import java.util.ArrayList;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
 /**
@@ -37,7 +37,7 @@ public interface OutputArchive {
     public void writeFloat(float f, String tag) throws IOException;
     public void writeDouble(double d, String tag) throws IOException;
     public void writeString(Text s, String tag) throws IOException;
-    public void writeBuffer(ByteArrayOutputStream buf, String tag)
+    public void writeBuffer(BytesWritable buf, String tag)
         throws IOException;
     public void writeRecord(Record r, String tag) throws IOException;
     public void startRecord(Record r, String tag) throws IOException;

+ 19 - 50
src/java/org/apache/hadoop/record/Utils.java

@@ -19,11 +19,8 @@
 package org.apache.hadoop.record;
 
 import java.io.ByteArrayOutputStream;
-import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.nio.charset.CharacterCodingException;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
 /**
@@ -36,32 +33,6 @@ public class Utils {
     private Utils() {
     }
    
-    /**
-     * equals function that actually compares two buffers.
-     *
-     * @param one First buffer
-     * @param two Second buffer
-     * @return true if one and two contain exactly the same content, else false.
-     */
-    public static boolean bufEquals(ByteArrayOutputStream one,
-            ByteArrayOutputStream two) {
-        if (one == two) {
-            return true;
-        }
-        byte[] onearray = one.toByteArray();
-        byte[] twoarray = two.toByteArray();
-        boolean ret = (onearray.length == twoarray.length);
-        if (!ret) {
-            return ret;
-        }
-        for (int idx = 0; idx < onearray.length; idx++) {
-            if (onearray[idx] != twoarray[idx]) {
-                return false;
-            }
-        }
-        return true;
-    }
-    
     public static final char[] hexchars = { '0', '1', '2', '3', '4', '5',
                                             '6', '7', '8', '9', 'A', 'B',
                                             'C', 'D', 'E', 'F' };
@@ -200,10 +171,11 @@ public class Utils {
      * @param s 
      * @return 
      */
-    static String toXMLBuffer(ByteArrayOutputStream s) {
-        byte[] barr = s.toByteArray();
-        StringBuffer sb = new StringBuffer(2*barr.length);
-        for (int idx = 0; idx < barr.length; idx++) {
+    static String toXMLBuffer(BytesWritable s) {
+        byte[] barr = s.get();
+        int bsize = s.getSize();
+        StringBuffer sb = new StringBuffer(2*bsize);
+        for (int idx = 0; idx < bsize; idx++) {
             sb.append(Integer.toHexString((int)barr[idx]));
         }
         return sb.toString();
@@ -215,10 +187,9 @@ public class Utils {
      * @throws java.io.IOException 
      * @return 
      */
-    static ByteArrayOutputStream fromXMLBuffer(String s)
+    static BytesWritable fromXMLBuffer(String s)
     throws IOException {
-        ByteArrayOutputStream stream =  new ByteArrayOutputStream();
-        if (s.length() == 0) { return stream; }
+        if (s.length() == 0) { return new BytesWritable(); }
         int blen = s.length()/2;
         byte[] barr = new byte[blen];
         for (int idx = 0; idx < blen; idx++) {
@@ -226,8 +197,7 @@ public class Utils {
             char c2 = s.charAt(2*idx+1);
             barr[idx] = Byte.parseByte(""+c1+c2, 16);
         }
-        stream.write(barr);
-        return stream;
+        return new BytesWritable(barr);
     }
     
     /**
@@ -235,11 +205,12 @@ public class Utils {
      * @param buf 
      * @return 
      */
-    static String toCSVBuffer(ByteArrayOutputStream buf) {
-        byte[] barr = buf.toByteArray();
-        StringBuffer sb = new StringBuffer(barr.length+1);
+    static String toCSVBuffer(BytesWritable buf) {
+        byte[] barr = buf.get();
+        int bsize = buf.getSize();
+        StringBuffer sb = new StringBuffer(bsize+1);
         sb.append('#');
-        for(int idx = 0; idx < barr.length; idx++) {
+        for(int idx = 0; idx < bsize; idx++) {
             sb.append(Integer.toHexString((int)barr[idx]));
         }
         return sb.toString();
@@ -247,18 +218,17 @@ public class Utils {
     
     /**
      * Converts a CSV-serialized representation of buffer to a new
-     * ByteArrayOutputStream.
+     * BytesWritable.
      * @param s CSV-serialized representation of buffer
      * @throws java.io.IOException 
-     * @return Deserialized ByteArrayOutputStream
+     * @return Deserialized BytesWritable
      */
-    static ByteArrayOutputStream fromCSVBuffer(String s)
+    static BytesWritable fromCSVBuffer(String s)
     throws IOException {
         if (s.charAt(0) != '#') {
             throw new IOException("Error deserializing buffer.");
         }
-        ByteArrayOutputStream stream =  new ByteArrayOutputStream();
-        if (s.length() == 1) { return stream; }
+        if (s.length() == 1) { return new BytesWritable(); }
         int blen = (s.length()-1)/2;
         byte[] barr = new byte[blen];
         for (int idx = 0; idx < blen; idx++) {
@@ -266,7 +236,6 @@ public class Utils {
             char c2 = s.charAt(2*idx+2);
             barr[idx] = Byte.parseByte(""+c1+c2, 16);
         }
-        stream.write(barr);
-        return stream;
+        return new BytesWritable(barr);
     }
 }
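
The Utils changes keep the textual encodings intact while swapping the buffer type: a CSV buffer is rendered as '#' followed by hex digits for each valid byte, and parsed back with Byte.parseByte on two-character groups. A small illustrative encoder (not the code above: it forces two zero-padded hex digits per byte, whereas the Utils code calls Integer.toHexString on the raw byte value):

    import org.apache.hadoop.io.BytesWritable;

    public class CsvBufferSketch {
        static String toCsv(BytesWritable buf) {
            StringBuffer sb = new StringBuffer(2 * buf.getSize() + 1);
            sb.append('#'); // CSV buffers are marked with a leading '#'
            for (int idx = 0; idx < buf.getSize(); idx++) {
                sb.append(String.format("%02x", buf.get()[idx] & 0xff));
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(toCsv(new BytesWritable(new byte[] { 1, 0x2f }))); // #012f
        }
    }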

+ 2 - 2
src/java/org/apache/hadoop/record/XmlInputArchive.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.record;
 
 import java.io.InputStream;
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.util.ArrayList;
 
 import org.xml.sax.*;
@@ -28,6 +27,7 @@ import org.xml.sax.helpers.DefaultHandler;
 import javax.xml.parsers.SAXParserFactory;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 /**
  *
@@ -207,7 +207,7 @@ class XmlInputArchive implements InputArchive {
         return Utils.fromXMLString(v.getValue());
     }
     
-    public ByteArrayOutputStream readBuffer(String tag) throws IOException {
+    public BytesWritable readBuffer(String tag) throws IOException {
         Value v = next();
         if (!"string".equals(v.getType())) {
             throw new IOException("Error deserializing "+tag+".");
+ 2 - 2
src/java/org/apache/hadoop/record/XmlOutputArchive.java

@@ -19,12 +19,12 @@
 package org.apache.hadoop.record;
 
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.util.TreeMap;
 import java.util.ArrayList;
 import java.io.PrintStream;
 import java.io.OutputStream;
 import java.util.Stack;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -199,7 +199,7 @@ class XmlOutputArchive implements OutputArchive {
         printEndEnvelope(tag);
     }
     
-    public void writeBuffer(ByteArrayOutputStream buf, String tag)
+    public void writeBuffer(BytesWritable buf, String tag)
     throws IOException {
         printBeginEnvelope(tag);
         stream.print("<string>");

+ 9 - 8
src/java/org/apache/hadoop/record/compiler/JBuffer.java

@@ -26,7 +26,7 @@ public class JBuffer extends JCompType {
     
     /** Creates a new instance of JBuffer */
     public JBuffer() {
-        super(" ::std::string", "java.io.ByteArrayOutputStream", "Buffer", "java.io.ByteArrayOutputStream");
+        super(" ::std::string", "BytesWritable", "Buffer", "BytesWritable");
     }
     
     public String genCppGetSet(String fname, int fIdx) {
@@ -34,7 +34,7 @@ public class JBuffer extends JCompType {
         cgetFunc += "    return m"+fname+";\n";
         cgetFunc += "  }\n";
         String getFunc = "  virtual "+getCppType()+"& get"+fname+"() {\n";
-        getFunc += "    bs_.set("+fIdx+");return m"+fname+";\n";
+        getFunc += "    return m"+fname+";\n";
         getFunc += "  }\n";
         return cgetFunc + getFunc;
     }
@@ -46,7 +46,7 @@ public class JBuffer extends JCompType {
     public String genJavaReadWrapper(String fname, String tag, boolean decl) {
         String ret = "";
         if (decl) {
-            ret = "    java.io.ByteArrayOutputStream "+fname+";\n";
+            ret = "    BytesWritable "+fname+";\n";
         }
         return ret + "        "+fname+"=a_.readBuffer(\""+tag+"\");\n";
     }
@@ -58,9 +58,10 @@ public class JBuffer extends JCompType {
     public String genJavaCompareTo(String fname, String other) {
       StringBuffer sb = new StringBuffer();
       sb.append("    {\n");
-      sb.append("      byte[] my = "+fname+".toByteArray();\n");
-      sb.append("      byte[] ur = "+other+".toByteArray();\n");
-      sb.append("      ret = WritableComparator.compareBytes(my,0,my.length,ur,0,ur.length);\n");
+      sb.append("      byte[] my = "+fname+".get();\n");
+      sb.append("      byte[] ur = "+other+".get();\n");
+      sb.append("      ret = WritableComparator.compareBytes(my,0,"+
+          fname+".getSize(),ur,0,"+other+".getSize());\n");
       sb.append("    }\n");
       return sb.toString();
     }
@@ -70,11 +71,11 @@ public class JBuffer extends JCompType {
     }
     
     public String genJavaEquals(String fname, String peer) {
-        return "    ret = org.apache.hadoop.record.Utils.bufEquals("+fname+","+peer+");\n";
+        return "    ret = "+fname+".equals("+peer+");\n";
    }
    
    public String genJavaHashCode(String fname) {
-        return "    ret = "+fname+".toString().hashCode();\n";
+        return "    ret = "+fname+".hashCode();\n";
    }
    
    public String genJavaSlurpBytes(String b, String s, String l) {
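
With BytesWritable as the Java type for 'buffer', JBuffer can generate equals, hashCode, and compareTo that delegate to BytesWritable's own value semantics rather than Utils.bufEquals and String hashing. What the emitted code boils down to for one buffer field (a sketch with hypothetical variable names; the compiler assembles these lines via string templates):

    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.WritableComparator;

    public class GeneratedBufferOpsSketch {
        public static void main(String[] args) {
            BytesWritable mine = new BytesWritable(new byte[] { 1, 2, 3 });
            BytesWritable peer = new BytesWritable(new byte[] { 1, 2, 3 });

            boolean eq = mine.equals(peer); // value-based equality
            int hash = mine.hashCode();     // value-based hash
            // Lexicographic comparison over the valid bytes only.
            int cmp = WritableComparator.compareBytes(
                mine.get(), 0, mine.getSize(),
                peer.get(), 0, peer.getSize());

            System.out.println(eq + " " + hash + " " + cmp); // true <hash> 0
        }
    }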

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JCompType.java

@@ -36,7 +36,7 @@ abstract class JCompType extends JType {
         cgetFunc += "    return m"+fname+";\n";
         cgetFunc += "  }\n";
         String getFunc = "  virtual "+getCppType()+"& get"+fname+"() {\n";
-        getFunc += "    bs_.set("+fIdx+");return m"+fname+";\n";
+        getFunc += "    return m"+fname+";\n";
         getFunc += "  }\n";
         return cgetFunc + getFunc;
     }

+ 7 - 46
src/java/org/apache/hadoop/record/compiler/JRecord.java

@@ -139,13 +139,11 @@ public class JRecord extends JCompType {
             JField jf = (JField) i.next();
             hh.write(jf.genCppDecl());
         }
-        hh.write("  mutable std::bitset<"+mFields.size()+"> bs_;\n");
         hh.write("public:\n");
         hh.write("  virtual void serialize(::hadoop::OArchive& a_, const char* tag) const;\n");
         hh.write("  virtual void deserialize(::hadoop::IArchive& a_, const char* tag);\n");
         hh.write("  virtual const ::std::string& type() const;\n");
         hh.write("  virtual const ::std::string& signature() const;\n");
-        hh.write("  virtual bool validate() const;\n");
         hh.write("  virtual bool operator<(const "+getName()+"& peer_) const;\n");
         hh.write("  virtual bool operator==(const "+getName()+"& peer_) const;\n");
         hh.write("  virtual ~"+getName()+"() {};\n");
@@ -159,7 +157,6 @@ public class JRecord extends JCompType {
             hh.write("} // end namespace "+ns[i]+"\n");
         }
         cc.write("void "+getCppFQName()+"::serialize(::hadoop::OArchive& a_, const char* tag) const {\n");
-        cc.write("  if (!validate()) throw new ::hadoop::IOException(\"All fields not set.\");\n");
         cc.write("  a_.startRecord(*this,tag);\n");
         fIdx = 0;
         for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
@@ -170,7 +167,6 @@ public class JRecord extends JCompType {
             } else {
                 cc.write("  a_.serialize("+name+",\""+jf.getTag()+"\");\n");
             }
-            cc.write("  bs_.reset("+fIdx+");\n");
         }
         cc.write("  a_.endRecord(*this,tag);\n");
         cc.write("  return;\n");
@@ -187,23 +183,11 @@ public class JRecord extends JCompType {
             } else {
                 cc.write("  a_.deserialize("+name+",\""+jf.getTag()+"\");\n");
             }
-            cc.write("  bs_.set("+fIdx+");\n");
         }
         cc.write("  a_.endRecord(*this,tag);\n");
         cc.write("  return;\n");
         cc.write("}\n");
         
-        cc.write("bool "+getCppFQName()+"::validate() const {\n");
-        cc.write("  if (bs_.size() != bs_.count()) return false;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            JType type = jf.getType();
-            if (type instanceof JRecord) {
-                cc.write("  if (!"+jf.getName()+".validate()) return false;\n");
-            }
-        }
-        cc.write("  return true;\n");
-        cc.write("}\n");
         
         cc.write("bool "+getCppFQName()+"::operator< (const "+getCppFQName()+"& peer_) const {\n");
         cc.write("  return (1\n");
@@ -261,6 +245,7 @@ public class JRecord extends JCompType {
         jj.write("import org.apache.hadoop.io.WritableComparator;\n");
         jj.write("import org.apache.hadoop.io.WritableComparable;\n");
         jj.write("import org.apache.hadoop.io.WritableUtils;\n");
+        jj.write("import org.apache.hadoop.io.BytesWritable;\n");
         jj.write("import org.apache.hadoop.io.Text;\n\n");
         jj.write("public class "+getName()+" implements org.apache.hadoop.record.Record, WritableComparable {\n");
         jj.write("  private static final Log LOG= LogFactory.getLog(\""+
@@ -269,23 +254,17 @@ public class JRecord extends JCompType {
             JField jf = (JField) i.next();
             jj.write(jf.genJavaDecl());
         }
-        jj.write("  private java.util.BitSet bs_;\n");
-        jj.write("  public "+getName()+"() {\n");
-        jj.write("    bs_ = new java.util.BitSet("+(mFields.size()+1)+");\n");
-        jj.write("    bs_.set("+mFields.size()+");\n");
-        jj.write("  }\n");
+        jj.write("  public "+getName()+"() { }\n");
+        
         
         jj.write("  public "+getName()+"(\n");
         int fIdx = 0;
-        int fLen = mFields.size();
         for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
             JField jf = (JField) i.next();
             jj.write(jf.genJavaConstructorParam(fIdx));
-            jj.write((fLen-1 == fIdx)?"":",\n");
+            jj.write((!i.hasNext())?"":",\n");
         }
         jj.write(") {\n");
-        jj.write("    bs_ = new java.util.BitSet("+(mFields.size()+1)+");\n");
-        jj.write("    bs_.set("+mFields.size()+");\n");
         fIdx = 0;
         for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
             JField jf = (JField) i.next();
@@ -298,24 +277,19 @@ public class JRecord extends JCompType {
             jj.write(jf.genJavaGetSet(fIdx));
         }
         jj.write("  public void serialize(org.apache.hadoop.record.OutputArchive a_, String tag) throws java.io.IOException {\n");
-        jj.write("    if (!validate()) throw new java.io.IOException(\"All fields not set:\");\n");
         jj.write("    a_.startRecord(this,tag);\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
             JField jf = (JField) i.next();
             jj.write(jf.genJavaWriteMethodName());
-            jj.write("    bs_.clear("+fIdx+");\n");
         }
         jj.write("    a_.endRecord(this,tag);\n");
         jj.write("  }\n");
         
         jj.write("  public void deserialize(org.apache.hadoop.record.InputArchive a_, String tag) throws java.io.IOException {\n");
         jj.write("    a_.startRecord(tag);\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
             JField jf = (JField) i.next();
             jj.write(jf.genJavaReadMethodName());
-            jj.write("    bs_.set("+fIdx+");\n");
         }
         jj.write("    a_.endRecord(tag);\n");
         jj.write("}\n");
@@ -335,9 +309,8 @@ public class JRecord extends JCompType {
         jj.write("      a_.endRecord(this,\"\");\n");
         jj.write("      return new String(s.toByteArray(), \"UTF-8\");\n");
         jj.write("    } catch (Throwable ex) {\n");
-        jj.write("      ex.printStackTrace();\n");
+        jj.write("      throw new RuntimeException(ex);\n");
         jj.write("    }\n");
-        jj.write("    return \"ERROR\";\n");
         jj.write("  }\n");
         
         jj.write("  public void write(java.io.DataOutput out) throws java.io.IOException {\n");
@@ -350,18 +323,6 @@ public class JRecord extends JCompType {
         jj.write("    deserialize(archive, \"\");\n");
         jj.write("  }\n");
         
-        jj.write("  public boolean validate() {\n");
-        jj.write("    if (bs_.cardinality() != bs_.length()) return false;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            JType type = jf.getType();
-            if (type instanceof JRecord) {
-                jj.write("    if (!"+jf.getName()+".validate()) return false;\n");
-            }
-        }
-        jj.write("    return true;\n");
-        jj.write("}\n");
-        
         jj.write("  public int compareTo (Object peer_) throws ClassCastException {\n");
         jj.write("    if (!(peer_ instanceof "+getName()+")) {\n");
         jj.write("      throw new ClassCastException(\"Comparing different types of records.\");\n");

+ 3 - 3
src/java/org/apache/hadoop/record/compiler/JType.java

@@ -61,7 +61,7 @@ abstract public class JType {
         getFunc += "    return m"+fname+";\n";
         getFunc += "  }\n";
         String setFunc = "  virtual void set"+fname+"("+mCppName+" m_) {\n";
-        setFunc += "    m"+fname+"=m_; bs_.set("+fIdx+");\n";
+        setFunc += "    m"+fname+"=m_;\n";
         setFunc += "  }\n";
         return getFunc+setFunc;
     }
@@ -71,7 +71,7 @@ abstract public class JType {
         getFunc += "    return m"+fname+";\n";
         getFunc += "  }\n";
         String setFunc = "  public void set"+fname+"("+mJavaName+" m_) {\n";
-        setFunc += "    m"+fname+"=m_; bs_.set("+fIdx+");\n";
+        setFunc += "    m"+fname+"=m_;\n";
         setFunc += "  }\n";
         return getFunc+setFunc;
     }
@@ -143,6 +143,6 @@ abstract public class JType {
     }
 
     String genJavaConstructorSet(String fname, int fIdx) {
-        return "    m"+fname+"=m"+fIdx+"; bs_.set("+fIdx+");\n";
+        return "    m"+fname+"=m"+fIdx+";\n";
     }
 }

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/ant/RccTask.java

@@ -52,7 +52,7 @@ public class RccTask extends Task {
   private String language = "java";
   private File src;
   private File dest = new File(".");
-  private final Vector<FileSet> filesets = new Vector();
+  private final Vector<FileSet> filesets = new Vector<FileSet>();
   private boolean failOnError = true;
   
   /** Creates a new instance of RccTask */

+ 4 - 3
src/test/org/apache/hadoop/record/test/FromCpp.java

@@ -26,6 +26,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.TreeMap;
 import junit.framework.*;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -56,7 +57,7 @@ public class FromCpp extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             FileInputStream istream = new FileInputStream(tmpfile);
@@ -82,7 +83,7 @@ public class FromCpp extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             FileInputStream istream = new FileInputStream(tmpfile);
@@ -108,7 +109,7 @@ public class FromCpp extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             FileInputStream istream = new FileInputStream(tmpfile);

+ 30 - 5
src/test/org/apache/hadoop/record/test/TestRecordIO.java

@@ -22,12 +22,12 @@ import java.io.IOException;
 import junit.framework.*;
 import org.apache.hadoop.record.RecordWriter;
 import org.apache.hadoop.record.RecordReader;
-import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.util.ArrayList;
 import java.util.TreeMap;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -60,7 +60,7 @@ public class TestRecordIO extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             RecRecord0 r0 = new RecRecord0();
@@ -94,7 +94,7 @@ public class TestRecordIO extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             RecRecord0 r0 = new RecRecord0();
@@ -109,11 +109,36 @@
             istream.close();
             tmpfile.delete();
             assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
+            
         } catch (IOException ex) {
             ex.printStackTrace();
-        } 
+        }
     }
 
+    public void testToString() {
+      try {
+            RecRecord1 r1 = new RecRecord1();
+            r1.setBoolVal(true);
+            r1.setByteVal((byte)0x66);
+            r1.setFloatVal(3.145F);
+            r1.setDoubleVal(1.5234);
+            r1.setIntVal(4567);
+            r1.setLongVal(0x5a5a5a5a5a5aL);
+            r1.setStringVal(new Text("random text"));
+            r1.setBufferVal(new BytesWritable());
+            r1.setVectorVal(new ArrayList());
+            r1.setMapVal(new TreeMap());
+            RecRecord0 r0 = new RecRecord0();
+            r0.setStringVal(new Text("other random text"));
+            r1.setRecordVal(r0);
+            System.err.println("Illustrating toString bug"+r1.toString());
+            System.err.println("Illustrating toString bug"+r1.toString());
+        } catch (Throwable ex) {
+            assertTrue("Record.toString cannot be invoked twice in succession."+
+                "This bug has been fixed in the latest version.", false);
+        }
+    }
+    
     public void testXml() {
         File tmpfile;
         try {
@@ -128,7 +153,7 @@ public class TestRecordIO extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("ran\002dom &lt; %text<&more"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             RecRecord0 r0 = new RecRecord0();

+ 1 - 12
src/test/org/apache/hadoop/record/test/TestWritable.java

@@ -63,8 +63,6 @@ public class TestWritable extends TestCase {
     for (int length = 0; length < MAX_LENGTH;
          length+= random.nextInt(MAX_LENGTH/10)+1) {
 
-      //LOG.info("creating; entries = " + length);
-
       // create a file with length entries
       SequenceFile.Writer writer =
         new SequenceFile.Writer(fs, conf, file,
@@ -76,9 +74,7 @@ public class TestWritable extends TestCase {
           byte[] data = new byte[random.nextInt(10)];
           random.nextBytes(data);
           RecBuffer value = new RecBuffer();
-          ByteArrayOutputStream strm = new ByteArrayOutputStream(data.length);
-          strm.write(data);
-          value.setData(strm);
+          value.setData(new BytesWritable(data));
           writer.append(key, value);
         }
       } finally {
@@ -92,9 +88,7 @@ public class TestWritable extends TestCase {
       for (int i = 0; i < 3; i++) {
         int numSplits =
           random.nextInt(MAX_LENGTH/(SequenceFile.SYNC_INTERVAL/20))+1;
-        //LOG.info("splitting: requesting = " + numSplits);
         InputSplit[] splits = format.getSplits(job, numSplits);
-        //LOG.info("splitting: got =        " + splits.length);
 
         // check each split
         BitSet bits = new BitSet(length);
@@ -104,15 +98,10 @@ public class TestWritable extends TestCase {
           try {
             int count = 0;
             while (reader.next(key, value)) {
-              // if (bits.get(key.get())) {
-              // LOG.info("splits["+j+"]="+splits[j]+" : " + key.get());
-              // LOG.info("@"+reader.getPos());
-              // }
               assertFalse("Key in multiple partitions.", bits.get(key.getData()));
               bits.set(key.getData());
               count++;
             }
-            //LOG.info("splits["+j+"]="+splits[j]+" count=" + count);
           } finally {
             reader.close();
           }

+ 4 - 3
src/test/org/apache/hadoop/record/test/ToCpp.java

@@ -26,6 +26,7 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.util.ArrayList;
 import java.util.TreeMap;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -58,7 +59,7 @@ public class ToCpp extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             out.write(r1);
@@ -82,7 +83,7 @@ public class ToCpp extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             out.write(r1);
@@ -106,7 +107,7 @@ public class ToCpp extends TestCase {
             r1.setIntVal(4567);
             r1.setLongVal(0x5a5a5a5a5a5aL);
             r1.setStringVal(new Text("random text"));
-            r1.setBufferVal(new ByteArrayOutputStream(20));
+            r1.setBufferVal(new BytesWritable());
             r1.setVectorVal(new ArrayList());
             r1.setMapVal(new TreeMap());
             out.write(r1);