Procházet zdrojové kódy

Preparing for 0.3.1 release. merge -r 411302:411936 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/branches/branch-0.3@411937 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting před 19 lety
rodič
revize
1fb8f51a54
Změněno 42 souborů, ve kterých bylo provedeno 208 přidání a 92 odebrání
  1. 19 0
      CHANGES.txt
  2. 3 2
      bin/hadoop
  3. 1 0
      bin/hadoop-daemon.sh
  4. 3 1
      build.xml
  5. 6 7
      conf/log4j.properties
  6. 16 7
      site/index.html
  7. 20 9
      site/index.pdf
  8. 1 0
      src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java
  9. 1 1
      src/java/org/apache/hadoop/record/BinaryInputArchive.java
  10. 1 1
      src/java/org/apache/hadoop/record/BinaryOutputArchive.java
  11. 1 1
      src/java/org/apache/hadoop/record/CsvInputArchive.java
  12. 1 1
      src/java/org/apache/hadoop/record/CsvOutputArchive.java
  13. 1 1
      src/java/org/apache/hadoop/record/InputArchive.java
  14. 1 1
      src/java/org/apache/hadoop/record/OutputArchive.java
  15. 1 1
      src/java/org/apache/hadoop/record/RecordReader.java
  16. 2 2
      src/java/org/apache/hadoop/record/RecordWriter.java
  17. 1 1
      src/java/org/apache/hadoop/record/Utils.java
  18. 1 1
      src/java/org/apache/hadoop/record/XmlInputArchive.java
  19. 1 1
      src/java/org/apache/hadoop/record/XmlOutputArchive.java
  20. 18 5
      src/java/org/apache/hadoop/record/compiler/CppGenerator.java
  21. 1 1
      src/java/org/apache/hadoop/record/compiler/JBoolean.java
  22. 1 1
      src/java/org/apache/hadoop/record/compiler/JBuffer.java
  23. 1 1
      src/java/org/apache/hadoop/record/compiler/JByte.java
  24. 8 6
      src/java/org/apache/hadoop/record/compiler/JCompType.java
  25. 1 1
      src/java/org/apache/hadoop/record/compiler/JDouble.java
  26. 1 1
      src/java/org/apache/hadoop/record/compiler/JField.java
  27. 15 3
      src/java/org/apache/hadoop/record/compiler/JFile.java
  28. 1 1
      src/java/org/apache/hadoop/record/compiler/JFloat.java
  29. 1 1
      src/java/org/apache/hadoop/record/compiler/JInt.java
  30. 1 1
      src/java/org/apache/hadoop/record/compiler/JLong.java
  31. 1 1
      src/java/org/apache/hadoop/record/compiler/JMap.java
  32. 2 2
      src/java/org/apache/hadoop/record/compiler/JRecord.java
  33. 1 1
      src/java/org/apache/hadoop/record/compiler/JString.java
  34. 22 21
      src/java/org/apache/hadoop/record/compiler/JType.java
  35. 1 1
      src/java/org/apache/hadoop/record/compiler/JVector.java
  36. 13 3
      src/java/org/apache/hadoop/record/compiler/JavaGenerator.java
  37. 12 0
      src/java/org/apache/hadoop/record/compiler/generated/package.html
  38. 14 0
      src/java/org/apache/hadoop/record/compiler/package.html
  39. 9 0
      src/site/src/documentation/content/xdocs/index.xml
  40. 1 1
      src/test/org/apache/hadoop/record/test/FromCpp.java
  41. 1 1
      src/test/org/apache/hadoop/record/test/TestRecordIO.java
  42. 1 1
      src/test/org/apache/hadoop/record/test/ToCpp.java

+ 19 - 0
CHANGES.txt

@@ -1,6 +1,25 @@
 Hadoop Change Log
 
 
+Release 0.3.1 - 2006-06-05
+
+ 1. HADOOP-272.  Fix a bug in bin/hadoop setting log
+    parameters. (omalley & cutting)
+
+ 2. HADOOP-274.  Change applications to log to standard output rather
+    than to a rolling log file like daemons.  (omalley via cutting)
+
+ 3. HADOOP-262.  Fix reduce tasks to report progress while they're
+    waiting for map outputs, so that they do not time out.
+    (Mahadev Konar via cutting)
+
+ 4. HADOOP-245 and HADOOP-246.  Improvements to record io package.  
+    (Mahadev Konar via cutting)
+
+ 5. HADOOP-276.  Add logging config files to jar file so that they're
+    always found.  (omalley via cutting)
+
+
 Release 0.3.0 - 2006-06-02
 
  1. HADOOP-208.  Enhance MapReduce web interface, adding new pages

+ 3 - 2
bin/hadoop

@@ -121,10 +121,10 @@ done
 unset IFS
 
 # default log directory & file
-if [ "HADOOP_LOG_DIR" = "" ]; then
+if [ "$HADOOP_LOG_DIR" = "" ]; then
   HADOOP_LOG_DIR="$HADOOP_HOME/logs"
 fi
-if [ "HADOOP_LOGFILE" = "" ]; then
+if [ "$HADOOP_LOGFILE" = "" ]; then
   HADOOP_LOGFILE='hadoop.log'
 fi
 
@@ -162,6 +162,7 @@ HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,stdout}"
 
 # run it
 exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

+ 1 - 0
bin/hadoop-daemon.sh

@@ -63,6 +63,7 @@ fi
 
 # some variables
 export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-`hostname`.log
+export HADOOP_ROOT_LOGGER="INFO,DRFA"
 log=$HADOOP_LOG_DIR/hadoop-$HADOOP_IDENT_STRING-$command-`hostname`.out
 pid=$HADOOP_PID_DIR/hadoop-$HADOOP_IDENT_STRING-$command.pid
 

+ 3 - 1
build.xml

@@ -9,7 +9,7 @@
  
   <property name="Name" value="Hadoop"/>
   <property name="name" value="hadoop"/>
-  <property name="version" value="0.3.1-dev"/>
+  <property name="version" value="0.3.2-dev"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="year" value="2006"/>
   <property name="libhdfs.version" value="1"/>
@@ -181,6 +181,8 @@
          basedir="${build.classes}">
       <fileset file="${conf.dir}/hadoop-default.xml"/>
       <fileset file="${conf.dir}/mapred-default.xml"/>
+      <fileset file="${conf.dir}/commons-logging.properties"/>
+      <fileset file="${conf.dir}/log4j.properties"/>
       <zipfileset dir="${build.webapps}" prefix="webapps"/>
     </jar>
   </target>

+ 6 - 7
conf/log4j.properties

@@ -1,5 +1,5 @@
 # RootLogger - DailyRollingFileAppender
-log4j.rootLogger=INFO,DRFA
+log4j.rootLogger=${hadoop.root.logger}
 
 # Logging Threshold
 log4j.threshhold=ALL
@@ -20,8 +20,8 @@ log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
 log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
 
 # Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-# Debugging Pattern format: Date LogLevel LoggerName (FileName:MethodName:LineNo) LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
 #log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
 
 
@@ -30,10 +30,9 @@ log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
 # Add *stdout* to rootlogger above if you want to use this 
 #
 
-#log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-#log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
 
 #
 # Rolling File Appender

+ 16 - 7
site/index.html

@@ -122,6 +122,9 @@ document.write("<text>Last Published:</text> " + document.lastModified);
 <a href="#News">News</a>
 <ul class="minitoc">
 <li>
+<a href="#5+June%2C+2006%3A+release+0.3.1+available">5 June, 2006: release 0.3.1 available</a>
+</li>
+<li>
 <a href="#2+June%2C+2006%3A+release+0.3.0+available">2 June, 2006: release 0.3.0 available</a>
 </li>
 <li>
@@ -151,20 +154,26 @@ document.write("<text>Last Published:</text> " + document.lastModified);
 <a name="N1000C"></a><a name="News"></a>
 <h2 class="h3">News</h2>
 <div class="section">
-<a name="N10012"></a><a name="2+June%2C+2006%3A+release+0.3.0+available"></a>
+<a name="N10012"></a><a name="5+June%2C+2006%3A+release+0.3.1+available"></a>
+<h3 class="h4">5 June, 2006: release 0.3.1 available</h3>
+<p>This is a bugfix release.  For details see the <a href="http://tinyurl.com/l6on4">change log</a>. The release can
+      be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
+      nearby mirror</a>.
+      </p>
+<a name="N10024"></a><a name="2+June%2C+2006%3A+release+0.3.0+available"></a>
 <h3 class="h4">2 June, 2006: release 0.3.0 available</h3>
 <p>This includes many fixes, improving performance, scalability
       and reliability and adding new features.  For details see the <a href="http://tinyurl.com/rq3f7">change log</a>. The release can
       be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N10024"></a><a name="12+May%2C+2006%3A+release+0.2.1+available"></a>
+<a name="N10036"></a><a name="12+May%2C+2006%3A+release+0.2.1+available"></a>
 <h3 class="h4">12 May, 2006: release 0.2.1 available</h3>
 <p>This fixes a few bugs in release 0.2.0, listed in the <a href="http://tinyurl.com/rnnvz">change log</a>. The
       release can be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N10036"></a><a name="5+May%2C+2006%3A+release+0.2.0+available"></a>
+<a name="N10048"></a><a name="5+May%2C+2006%3A+release+0.2.0+available"></a>
 <h3 class="h4">5 May, 2006: release 0.2.0 available</h3>
 <p>We are now aiming for monthly releases.  There have been many
       bug fixes and improvements in the past month.  MapReduce and DFS
@@ -173,24 +182,24 @@ document.write("<text>Last Published:</text> " + document.lastModified);
       details. The release can be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N10048"></a><a name="2+April%2C+2006%3A+release+0.1.0+available"></a>
+<a name="N1005A"></a><a name="2+April%2C+2006%3A+release+0.1.0+available"></a>
 <h3 class="h4">2 April, 2006: release 0.1.0 available</h3>
 <p>This is the first Hadoop release.  The release is available
       <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/">
       here</a>.</p>
-<a name="N10056"></a><a name="6+February%2C+2006%3A+nightly+builds"></a>
+<a name="N10068"></a><a name="6+February%2C+2006%3A+nightly+builds"></a>
 <h3 class="h4">6 February, 2006: nightly builds</h3>
 <p>Hadoop now has nightly builds.  This automatically creates a
       <a href="http://cvs.apache.org/dist/lucene/hadoop/nightly/">downloadable version of Hadoop every
       night</a>.  All unit tests must pass, or a message is sent to
       the developers mailing list and no new version is created.  This
       also updates the <a href="docs/api/">javadoc</a>.</p>
-<a name="N10068"></a><a name="3+February%2C+2006%3A+Hadoop+code+moved+out+of+Nutch"></a>
+<a name="N1007A"></a><a name="3+February%2C+2006%3A+Hadoop+code+moved+out+of+Nutch"></a>
 <h3 class="h4">3 February, 2006: Hadoop code moved out of Nutch</h3>
 <p>The Hadoop code has now been moved into its own Subversion
       tree, renamed into packages under <span class="codefrag">org.apache.hadoop</span>.
       All unit tests pass, but little else has yet been tested.</p>
-<a name="N10075"></a><a name="30+March%2C+2006%3A+Hadoop+project+approved"></a>
+<a name="N10087"></a><a name="30+March%2C+2006%3A+Hadoop+project+approved"></a>
 <h3 class="h4">30 March, 2006: Hadoop project approved</h3>
 <p>The Lucene PMC has elected to split the Nutch MapReduce and
       distributed filesytem code into a new project named Hadoop.</p>

Rozdílová data souboru nebyla zobrazena, protože soubor je příliš velký
+ 20 - 9
site/index.pdf


+ 1 - 0
src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java

@@ -419,6 +419,7 @@ class ReduceTaskRunner extends TaskRunner {
       // new, just wait for a bit
       try {
         if (numInFlight == 0 && numScheduled == 0) {
+          getTask().reportProgress(getTracker());
           Thread.sleep(5000);
         }
       } catch (InterruptedException e) { } // IGNORE

+ 1 - 1
src/java/org/apache/hadoop/record/BinaryInputArchive.java

@@ -25,7 +25,7 @@ import java.io.InputStream;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class BinaryInputArchive implements InputArchive {
     

+ 1 - 1
src/java/org/apache/hadoop/record/BinaryOutputArchive.java

@@ -26,7 +26,7 @@ import java.io.OutputStream;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class BinaryOutputArchive implements OutputArchive {
     

+ 1 - 1
src/java/org/apache/hadoop/record/CsvInputArchive.java

@@ -26,7 +26,7 @@ import java.io.UnsupportedEncodingException;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 class CsvInputArchive implements InputArchive {
     

+ 1 - 1
src/java/org/apache/hadoop/record/CsvOutputArchive.java

@@ -26,7 +26,7 @@ import java.io.UnsupportedEncodingException;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class CsvOutputArchive implements OutputArchive {
 

+ 1 - 1
src/java/org/apache/hadoop/record/InputArchive.java

@@ -24,7 +24,7 @@ import java.util.ArrayList;
 /**
  * Interface that all the Deserializers have to implement.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public interface InputArchive {
     public byte readByte(String tag) throws IOException;

+ 1 - 1
src/java/org/apache/hadoop/record/OutputArchive.java

@@ -24,7 +24,7 @@ import java.util.ArrayList;
 /**
  * Interface that alll the serializers have to implement.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public interface OutputArchive {
     public void writeByte(byte b, String tag) throws IOException;

+ 1 - 1
src/java/org/apache/hadoop/record/RecordReader.java

@@ -30,7 +30,7 @@ import org.xml.sax.SAXException;
  * Front-end interface to deserializers. Also acts as a factory
  * for deserializers.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class RecordReader {
     

+ 2 - 2
src/java/org/apache/hadoop/record/RecordWriter.java

@@ -30,7 +30,7 @@ import org.xml.sax.SAXException;
 /**
  * Front-end for serializers. Also serves as a factory for serializers.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class RecordWriter {
     
@@ -112,4 +112,4 @@ public class RecordWriter {
     public void write(Record r) throws IOException {
         r.serialize(archive, "");
     }
-}
+}

+ 1 - 1
src/java/org/apache/hadoop/record/Utils.java

@@ -24,7 +24,7 @@ import java.io.UnsupportedEncodingException;
 
 /**
  * Various utility functions for Hadooop record I/O runtime.
- * @author milindb@yahoo-inc.com
+ * @author Milind Bhandarkar
  */
 public class Utils {
     

+ 1 - 1
src/java/org/apache/hadoop/record/XmlInputArchive.java

@@ -29,7 +29,7 @@ import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 class XmlInputArchive implements InputArchive {
     

+ 1 - 1
src/java/org/apache/hadoop/record/XmlOutputArchive.java

@@ -26,7 +26,7 @@ import java.util.Stack;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 class XmlOutputArchive implements OutputArchive {
 

+ 18 - 5
src/java/org/apache/hadoop/record/compiler/CppGenerator.java

@@ -23,22 +23,35 @@ import java.io.IOException;
 import java.util.Iterator;
 
 /**
+ * C++ Code generator front-end for Hadoop record I/O.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 class CppGenerator {
+    private String mFullName;
     private String mName;
     private ArrayList mInclFiles;
     private ArrayList mRecList;
     
-    /** Creates a new instance of CppGenerator */
-    public CppGenerator(String name, ArrayList ilist, ArrayList rlist) {
-        mName = name;
+    /** Creates a new instance of CppGenerator
+     *
+     * @param name possibly full pathname to the file
+     * @param ilist included files (as JFile)
+     * @param rlist List of records defined within this file
+     */
+    CppGenerator(String name, ArrayList ilist, ArrayList rlist) {
+        mFullName = name;
+        mName = (new File(name)).getName();
         mInclFiles = ilist;
         mRecList = rlist;
     }
     
-    public void genCode() throws IOException {
+    /**
+     * Generate C++ code. This method only creates the requested file(s)
+     * and spits-out file-level elements (such as include statements etc.)
+     * record-level code is generated by JRecord.
+     */
+    void genCode() throws IOException {
         FileWriter cc = new FileWriter(mName+".cc");
         FileWriter hh = new FileWriter(mName+".hh");
         hh.write("#ifndef __"+mName.toUpperCase().replace('.','_')+"__\n");

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JBoolean.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JBoolean extends JType {
     

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JBuffer.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JBuffer extends JCompType {
     

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JByte.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JByte extends JType {
     

+ 8 - 6
src/java/org/apache/hadoop/record/compiler/JCompType.java

@@ -17,17 +17,19 @@
 package org.apache.hadoop.record.compiler;
 
 /**
+ * Abstract base class for all the "compound" types such as ustring,
+ * buffer, vector, map, and record.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 abstract class JCompType extends JType {
     
     /** Creates a new instance of JCompType */
-    public JCompType(String cppType, String javaType, String suffix, String wrapper) {
+    JCompType(String cppType, String javaType, String suffix, String wrapper) {
         super(cppType, javaType, suffix, wrapper, null);
     }
     
-    public String genCppGetSet(String fname, int fIdx) {
+    String genCppGetSet(String fname, int fIdx) {
         String cgetFunc = "  virtual const "+getCppType()+"& get"+fname+"() const {\n";
         cgetFunc += "    return m"+fname+";\n";
         cgetFunc += "  }\n";
@@ -37,15 +39,15 @@ abstract class JCompType extends JType {
         return cgetFunc + getFunc;
     }
     
-    public String genJavaCompareTo(String fname) {
+    String genJavaCompareTo(String fname) {
         return "    ret = "+fname+".compareTo(peer."+fname+");\n";
     }
     
-    public String genJavaEquals(String fname, String peer) {
+    String genJavaEquals(String fname, String peer) {
         return "    ret = "+fname+".equals("+peer+");\n";
     }
     
-    public String genJavaHashCode(String fname) {
+    String genJavaHashCode(String fname) {
         return "    ret = "+fname+".hashCode();\n";
     }
 }

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JDouble.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JDouble extends JType {
     

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JField.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JField {
     private JType mType;

+ 15 - 3
src/java/org/apache/hadoop/record/compiler/JFile.java

@@ -20,8 +20,11 @@ import java.io.IOException;
 import java.util.ArrayList;
 
 /**
+ * Container for the Hadoop Record DDL.
+ * The main components of the file are filename, list of included files,
+ * and records defined in that file.
  *
- * @author milindb@yahoo-inc.com
+ * @author Milind Bhandarkar
  */
 public class JFile {
     
@@ -29,18 +32,27 @@ public class JFile {
     private ArrayList mInclFiles;
     private ArrayList mRecords;
     
-    /** Creates a new instance of JFile */
+    /** Creates a new instance of JFile
+     *
+     * @param name possibly full pathname to the file
+     * @param inclFiles included files (as JFile)
+     * @param recList List of records defined within this file
+     */
     public JFile(String name, ArrayList inclFiles, ArrayList recList) {
         mName = name;
         mInclFiles = inclFiles;
         mRecords = recList;
     }
-        
+    
+    /** Strip the other pathname components and return the basename */
     String getName() {
         int idx = mName.lastIndexOf('/');
         return (idx > 0) ? mName.substring(idx) : mName; 
     }
     
+    /** Generate record code in given language. Language should be all
+     *  lowercase.
+     */
     public void genCode(String language) throws IOException {
         if ("c++".equals(language)) {
             CppGenerator gen = new CppGenerator(mName, mInclFiles, mRecords);

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JFloat.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JFloat extends JType {
     

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JInt.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JInt extends JType {
     

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JLong.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JLong extends JType {
     

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JMap.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JMap extends JCompType {
    

+ 2 - 2
src/java/org/apache/hadoop/record/compiler/JRecord.java

@@ -24,7 +24,7 @@ import java.util.Iterator;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JRecord extends JCompType {
 
@@ -132,7 +132,7 @@ public class JRecord extends JCompType {
             hh.write(jf.genCppGetSet(fIdx));
         }
         hh.write("}; // end record "+getName()+"\n");
-        for (int i=0; i<ns.length; i++) {
+        for (int i=ns.length-1; i>=0; i--) {
             hh.write("} // end namespace "+ns[i]+"\n");
         }
         cc.write("void "+getCppFQName()+"::serialize(::hadoop::OArchive& a_, const char* tag) {\n");

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JString.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JString extends JCompType {
     

+ 22 - 21
src/java/org/apache/hadoop/record/compiler/JType.java

@@ -17,10 +17,11 @@
 package org.apache.hadoop.record.compiler;
 
 /**
- *
- * @author milindb
+ * Abstract Base class for all types supported by Hadoop Record I/O.
+ * 
+ * @author Milind Bhandarkar
  */
-public abstract class JType {
+abstract public class JType {
     
     private String mCppName;
     private String mJavaName;
@@ -31,7 +32,7 @@ public abstract class JType {
     /**
      * Creates a new instance of JType
      */
-    public JType(String cppname, String javaname, String suffix, String wrapper, String unwrap) {
+    JType(String cppname, String javaname, String suffix, String wrapper, String unwrap) {
         mCppName = cppname;
         mJavaName = javaname;
         mMethodSuffix = suffix;
@@ -39,21 +40,21 @@ public abstract class JType {
         mUnwrapMethod = unwrap;
     }
     
-    abstract public String getSignature();
+    abstract String getSignature();
     
-    public String genCppDecl(String fname) {
+    String genCppDecl(String fname) {
         return "  "+mCppName+" m"+fname+";\n"; 
     }
     
-    public String genJavaDecl (String fname) {
+    String genJavaDecl (String fname) {
         return "  private "+mJavaName+" m"+fname+";\n";
     }
     
-    public String genJavaConstructorParam (int fIdx) {
+    String genJavaConstructorParam (int fIdx) {
         return "        "+mJavaName+" m"+fIdx;
     }
     
-    public String genCppGetSet(String fname, int fIdx) {
+    String genCppGetSet(String fname, int fIdx) {
         String getFunc = "  virtual "+mCppName+" get"+fname+"() const {\n";
         getFunc += "    return m"+fname+";\n";
         getFunc += "  }\n";
@@ -63,7 +64,7 @@ public abstract class JType {
         return getFunc+setFunc;
     }
     
-    public String genJavaGetSet(String fname, int fIdx) {
+    String genJavaGetSet(String fname, int fIdx) {
         String getFunc = "  public "+mJavaName+" get"+fname+"() {\n";
         getFunc += "    return m"+fname+";\n";
         getFunc += "  }\n";
@@ -73,31 +74,31 @@ public abstract class JType {
         return getFunc+setFunc;
     }
     
-    public String getCppType() {
+    String getCppType() {
         return mCppName;
     }
     
-    public String getJavaType() {
+    String getJavaType() {
         return mJavaName;
     }
    
-    public String getJavaWrapperType() {
+    String getJavaWrapperType() {
         return mWrapper;
     }
     
-    public String getMethodSuffix() {
+    String getMethodSuffix() {
         return mMethodSuffix;
     }
     
-    public String genJavaWriteMethod(String fname, String tag) {
+    String genJavaWriteMethod(String fname, String tag) {
         return "    a_.write"+mMethodSuffix+"("+fname+",\""+tag+"\");\n";
     }
     
-    public String genJavaReadMethod(String fname, String tag) {
+    String genJavaReadMethod(String fname, String tag) {
         return "    "+fname+"=a_.read"+mMethodSuffix+"(\""+tag+"\");\n";
     }
     
-    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
+    String genJavaReadWrapper(String fname, String tag, boolean decl) {
         String ret = "";
         if (decl) {
             ret = "    "+mWrapper+" "+fname+";\n";
@@ -105,19 +106,19 @@ public abstract class JType {
         return ret + "    "+fname+"=new "+mWrapper+"(a_.read"+mMethodSuffix+"(\""+tag+"\"));\n";
     }
     
-    public String genJavaWriteWrapper(String fname, String tag) {
+    String genJavaWriteWrapper(String fname, String tag) {
         return "        a_.write"+mMethodSuffix+"("+fname+"."+mUnwrapMethod+"(),\""+tag+"\");\n";
     }
     
-    public String genJavaCompareTo(String fname) {
+    String genJavaCompareTo(String fname) {
         return "    ret = ("+fname+" == peer."+fname+")? 0 :(("+fname+"<peer."+fname+")?-1:1);\n";
     }
     
-    public String genJavaEquals(String fname, String peer) {
+    String genJavaEquals(String fname, String peer) {
         return "    ret = ("+fname+"=="+peer+");\n";
     }
     
-    public String genJavaHashCode(String fname) {
+    String genJavaHashCode(String fname) {
         return "    ret = (int)"+fname+";\n";
     }
 

+ 1 - 1
src/java/org/apache/hadoop/record/compiler/JVector.java

@@ -18,7 +18,7 @@ package org.apache.hadoop.record.compiler;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class JVector extends JCompType {
     

+ 13 - 3
src/java/org/apache/hadoop/record/compiler/JavaGenerator.java

@@ -23,22 +23,32 @@ import java.io.IOException;
 import java.util.Iterator;
 
 /**
+ * Java Code generator front-end for Hadoop record I/O.
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 class JavaGenerator {
     private String mName;
     private ArrayList mInclFiles;
     private ArrayList mRecList;
     
-    /** Creates a new instance of JavaGenerator */
+    /** Creates a new instance of JavaGenerator
+     *
+     * @param name possibly full pathname to the file
+     * @param incl included files (as JFile)
+     * @param records List of records defined within this file
+     */
     JavaGenerator(String name, ArrayList incl, ArrayList records) {
         mName = name;
         mInclFiles = incl;
         mRecList = records;
     }
     
-    public void genCode() throws IOException {
+    /**
+     * Generate Java code for records. This method is only a front-end to
+     * JRecord, since one file is generated for each record.
+     */
+    void genCode() throws IOException {
         for (Iterator i = mRecList.iterator(); i.hasNext(); ) {
             JRecord rec = (JRecord) i.next();
             rec.genJavaCode();

+ 12 - 0
src/java/org/apache/hadoop/record/compiler/generated/package.html

@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+
+<html>
+  <head>
+    <title>Hadoop Record Compiler: Parser</title>
+  </head>
+  <body>
+  This package contains code generated by JavaCC from the
+  Hadoop record syntax file rcc.jj. For details about the
+  record file syntax please @see org.apache.hadoop.record.
+  </body>
+</html>

+ 14 - 0
src/java/org/apache/hadoop/record/compiler/package.html

@@ -0,0 +1,14 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+
+<html>
+  <head>
+    <title>Hadoop Record Compiler</title>
+  </head>
+  <body>
+  This package contains classes needed for code generation
+  from the hadoop record compiler. CppGenerator and JavaGenerator
+  are the main entry points from the parser. There are classes
+  corrsponding to every primitive type and compound type
+  included in Hadoop record I/O syntax.
+  </body>
+</html>

+ 9 - 0
src/site/src/documentation/content/xdocs/index.xml

@@ -14,6 +14,15 @@
     <section>
       <title>News</title>
 
+      <section>
+      <title>5 June, 2006: release 0.3.1 available</title>
+      <p>This is a bugfix release.  For details see the <a
+      href="http://tinyurl.com/l6on4">change log</a>. The release can
+      be obtained from <a
+      href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
+      nearby mirror</a>.
+      </p> </section>
+
       <section>
       <title>2 June, 2006: release 0.3.0 available</title>
       <p>This includes many fixes, improving performance, scalability

+ 1 - 1
src/test/org/apache/hadoop/record/test/FromCpp.java

@@ -29,7 +29,7 @@ import junit.framework.*;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class FromCpp extends TestCase {
     

+ 1 - 1
src/test/org/apache/hadoop/record/test/TestRecordIO.java

@@ -29,7 +29,7 @@ import java.util.TreeMap;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class TestRecordIO extends TestCase {
     

+ 1 - 1
src/test/org/apache/hadoop/record/test/ToCpp.java

@@ -29,7 +29,7 @@ import java.util.TreeMap;
 
 /**
  *
- * @author milindb
+ * @author Milind Bhandarkar
  */
 public class ToCpp extends TestCase {
     

Některé soubory nejsou zobrazeny, neboť je v těchto rozdílových datech změněno mnoho souborů