
Merge trunk into HDFS-1623 post-mavenization.

- added CHANGES.HDFS-1623.txt to findbugs exclude
- added jsch dependency


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1159782 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon, 14 years ago
commit b0ea4b71c9
100 changed files with 685 additions and 605 deletions
Changed files (+ additions - deletions):

  1. + 2 - 0      .gitignore
  2. + 5 - 2      BUILDING.txt
  3. + 3 - 3      dev-support/smart-apply-patch.sh
  4. + 6 - 6      hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml
  5. + 13 - 3     hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml
  6. + 10 - 0     hadoop-common/CHANGES.txt
  7. + 16 - 568   hadoop-common/pom.xml
  8. + 111 - 0    hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
  9. + 3 - 2      hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
  10. + 3 - 3     hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java
  11. + 2 - 2     hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java
  12. + 0 - 0     hadoop-common/src/test/avro/AvroSpecificTestProtocol.avpr
  13. + 0 - 0     hadoop-common/src/test/avro/avroRecord.avsc
  14. + 6 - 5     hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
  15. + 35 - 4    hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java
  16. + 1 - 1     hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java
  17. + 3 - 3     hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java
  18. + 0 - 0     hadoop-hdfs/CHANGES.HDFS-1623.txt
  19. + 11 - 0    hadoop-hdfs/CHANGES.txt
  20. + 0 - 0     hadoop-hdfs/LICENSE.txt
  21. + 0 - 0     hadoop-hdfs/NOTICE.txt
  22. + 0 - 0     hadoop-hdfs/dev-support/all-tests
  23. + 0 - 0     hadoop-hdfs/dev-support/checkstyle-noframes-sorted.xsl
  24. + 3 - 3     hadoop-hdfs/dev-support/checkstyle.xml
  25. + 0 - 0     hadoop-hdfs/dev-support/commit-tests
  26. + 0 - 0     hadoop-hdfs/dev-support/findbugsExcludeFile.xml
  27. + 0 - 0     hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.20.0.xml
  28. + 0 - 0     hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.21.0.xml
  29. + 0 - 0     hadoop-hdfs/dev-support/smoke-tests
  30. + 3 - 0     hadoop-hdfs/dev-support/test-patch.properties
  31. + 411 - 0   hadoop-hdfs/pom.xml
  32. + 0 - 0     hadoop-hdfs/src/ant/org/apache/hadoop/ant/DfsTask.java
  33. + 0 - 0     hadoop-hdfs/src/ant/org/apache/hadoop/ant/antlib.xml
  34. + 0 - 0     hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java
  35. + 0 - 0     hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsExists.java
  36. + 0 - 0     hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java
  37. + 0 - 0     hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java
  38. + 0 - 0     hadoop-hdfs/src/contrib/build-contrib.xml
  39. + 0 - 0     hadoop-hdfs/src/contrib/build.xml
  40. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
  41. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/README
  42. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/acinclude.m4
  43. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/build.xml
  44. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
  45. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/global_footer.mk
  46. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk
  47. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/ivy.xml
  48. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/ivy/libraries.properties
  49. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
  50. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.c
  51. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.h
  52. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_context_handle.h
  53. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.c
  54. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h
  55. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh
  56. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_file_handle.h
  57. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls.h
  58. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_access.c
  59. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chmod.c
  60. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chown.c
  61. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_create.c
  62. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_flush.c
  63. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_getattr.c
  64. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mkdir.c
  65. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mknod.c
  66. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_open.c
  67. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_read.c
  68. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_readdir.c
  69. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_release.c
  70. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rename.c
  71. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rmdir.c
  72. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_statfs.c
  73. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_symlink.c
  74. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c
  75. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_unlink.c
  76. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_utimens.c
  77. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_write.c
  78. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.c
  79. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.h
  80. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.c
  81. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.h
  82. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.c
  83. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.h
  84. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.c
  85. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.h
  86. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.c
  87. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.h
  88. + 0 - 0     hadoop-hdfs/src/contrib/fuse-dfs/src/test/TestFuseDFS.java
  89. + 0 - 0     hadoop-hdfs/src/main/bin/distribute-exclude.sh
  90. + 0 - 0     hadoop-hdfs/src/main/bin/hdfs
  91. + 0 - 0     hadoop-hdfs/src/main/bin/hdfs-config.sh
  92. + 0 - 0     hadoop-hdfs/src/main/bin/refresh-namenodes.sh
  93. + 0 - 0     hadoop-hdfs/src/main/bin/start-balancer.sh
  94. + 0 - 0     hadoop-hdfs/src/main/bin/start-dfs.sh
  95. + 0 - 0     hadoop-hdfs/src/main/bin/start-secure-dns.sh
  96. + 0 - 0     hadoop-hdfs/src/main/bin/stop-balancer.sh
  97. + 0 - 0     hadoop-hdfs/src/main/bin/stop-dfs.sh
  98. + 0 - 0     hadoop-hdfs/src/main/bin/stop-secure-dns.sh
  99. + 17 - 0    hadoop-hdfs/src/main/conf/hadoop-metrics2.properties
  100. + 21 - 0   hadoop-hdfs/src/main/conf/hdfs-site.xml

+ 2 - 0
.gitignore

@@ -4,4 +4,6 @@
 .idea
 .svn
 .classpath
+.project
+.settings
 target

+ 5 - 2
hadoop-common/BUILDING.txt → BUILDING.txt

@@ -1,3 +1,6 @@
+
+Build instructions for Hadoop Common/HDFS using Maven
+
 ----------------------------------------------------------------------------------
 Requirements:
 
@@ -16,7 +19,8 @@ Maven modules:
          - hadoop-project     (Parent POM for all Hadoop Maven modules.             )
                               (All plugins & dependencies versions are defined here.)
          - hadoop-annotations (Generates the Hadoop doclet used to generated the Javadocs)
-         - hadoop-common      (Hadoop common)
+         - hadoop-common      (Hadoop Common)
+         - hadoop-hdfs        (Hadoop HDFS)
 
 ----------------------------------------------------------------------------------
 Where to run Maven from?
@@ -57,5 +61,4 @@ Maven build goals:
   * -Dtest.exclude=<TESTCLASSNAME>
   * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
 
-
 ----------------------------------------------------------------------------------

+ 3 - 3
dev-support/smart-apply-patch.sh

@@ -59,15 +59,15 @@ PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
 if [[ -d hadoop-common ]]; then
   echo Looks like this is being run at project root
 
-# if all of the lines start with hadoop-common/, hdfs/, or mapreduce/, this is
+# if all of the lines start with hadoop-common/, hadoop-hdfs/, or mapreduce/, this is
 # relative to the hadoop root instead of the subproject root, so we need
 # to chop off another layer
-elif [[ "$PREFIX_DIRS" =~ ^(hdfs|hadoop-common|mapreduce)$ ]]; then
+elif [[ "$PREFIX_DIRS" =~ ^(hadoop-hdfs|hadoop-common|mapreduce)$ ]]; then
 
   echo Looks like this is relative to project root. Increasing PLEVEL
   PLEVEL=$[$PLEVEL + 1]
 
-elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hdfs\|mapreduce' ; then
+elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hadoop-hdfs\|mapreduce' ; then
   echo Looks like this is a cross-subproject patch. Try applying from the project root
   exit 1
 fi

+ 6 - 6
hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml

@@ -24,9 +24,9 @@
     <fileSet>
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/bin</outputDirectory>
-      <includes>
-        <include>hadoop</include>
-      </includes>
+      <excludes>
+        <exclude>*.sh</exclude>
+      </excludes>
       <fileMode>0755</fileMode>
     </fileSet>
     <fileSet>
@@ -37,7 +37,7 @@
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -68,7 +68,7 @@
       </includes>
     </fileSet>
     <fileSet>
-      <directory>${basedir}/src/main/webapps</directory>
+      <directory>${project.build.directory}/webapps</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
     </fileSet>
     <fileSet>
@@ -101,7 +101,7 @@
     <dependencySet>
       <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
       <unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>

+ 13 - 3
hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml

@@ -40,7 +40,7 @@
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -51,6 +51,16 @@
     <fileSet>
       <directory>${basedir}/src/main/webapps</directory>
       <outputDirectory>/webapps</outputDirectory>
+      <excludes>
+        <exclude>proto-*-web.xml</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/webapps</directory>
+      <outputDirectory>/webapps</outputDirectory>
+      <excludes>
+        <exclude>proto-*-web.xml</exclude>
+      </excludes>
     </fileSet>
     <fileSet>
       <directory>${project.build.directory}/site</directory>
@@ -73,11 +83,11 @@
     <dependencySet>
       <outputDirectory>/lib</outputDirectory>
       <unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>
-        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:*:*:*</exclude>
         <exclude>jdiff:jdiff:jar</exclude>
       </excludes>
     </dependencySet>

+ 10 - 0
hadoop-common/CHANGES.txt

@@ -324,6 +324,13 @@ Trunk (unreleased changes)
 
     HADOOP-7531. Add servlet util methods for handling paths in requests. (eli)
 
+    HADOOP-7493. Add ShortWritable.  (Uma Maheswara Rao G via szetszwo)
+
+    HADOOP-7555. Add a eclipse-generated files to .gitignore. (atm)
+
+    HADOOP-7264. Bump avro version to at least 1.4.1. (Alejandro Abdelnur via
+    tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -498,6 +505,9 @@ Trunk (unreleased changes)
     HADOOP-7545. Common -tests JAR should not include properties and configs.
     (todd)
 
+    HADOOP-7536. Correct the dependency version regressions introduced in
+    HADOOP-6671. (Alejandro Abdelnur via tomwhite)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

+ 16 - 568
hadoop-common/pom.xml

@@ -16,9 +16,9 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-project</artifactId>
+    <artifactId>hadoop-project-distro</artifactId>
     <version>0.23.0-SNAPSHOT</version>
-    <relativePath>../hadoop-project</relativePath>
+    <relativePath>../hadoop-project-distro</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common</artifactId>
@@ -28,18 +28,12 @@
   <packaging>jar</packaging>
 
   <properties>
-    <test.build.data>${project.build.directory}/test/data</test.build.data>
-    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
-    <test.build.webapps>${project.build.directory}/test-classes/webapps</test.build.webapps>
-    <test.cache.data>${project.build.directory}/test-classes</test.cache.data>
-    <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
-
-    <build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
     <snappy.prefix>/usr/local</snappy.prefix>
     <snappy.lib>${snappy.prefix}/lib</snappy.lib>
     <bundle.snappy>false</bundle.snappy>
     
     <hadoop.component>common</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
   </properties>
 
   <dependencies>
@@ -219,10 +213,15 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <scope>compile</scope>
+    </dependency>
     <dependency>
       <groupId>net.sf.kosmosfs</groupId>
       <artifactId>kfs</artifactId>
@@ -243,83 +242,20 @@
   <build>
     <plugins>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>always</forkMode>
-          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-          <argLine>-Xmx1024m</argLine>
-          <environmentVariables>
-            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
-          </environmentVariables>
-          <systemPropertyVariables>
-
-            <!-- TODO: all references in testcases should be updated to this default -->
-            <test.build.data>${test.build.data}</test.build.data>
-            <test.build.webapps>${test.build.webapps}</test.build.webapps>
-            <test.cache.data>${test.cache.data}</test.cache.data>
-            <hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
-            <test.build.classes>${test.build.classes}</test.build.classes>
-
-            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
-            <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
-          </systemPropertyVariables>
-          <includes>
-            <include>**/Test*.java</include>
-          </includes>
-          <excludes>
-            <exclude>**/${test.exclude}.java</exclude>
-            <exclude>${test.exclude.pattern}</exclude>
-            <exclude>**/Test*$*.java</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>prepare-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>prepare-test-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>test-jar</goal>
-            </goals>
-            <configuration>
-              <includes>
-                <include>**/*.class</include>
-              </includes>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
         <executions>
           <execution>
-            <phase>prepare-package</phase>
+            <id>generate-avro-test-sources</id>
+            <phase>generate-test-sources</phase>
             <goals>
-              <goal>jar</goal>
-              <goal>test-jar</goal>
+              <goal>schema</goal>
+              <goal>protocol</goal>
             </goals>
           </execution>
         </executions>
         <configuration>
-          <attach>true</attach>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-        <configuration>
-          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+          <testOutputDirectory>${project.build.directory}/generated-test-sources/java</testOutputDirectory>
         </configuration>
       </plugin>
       <plugin>
@@ -359,24 +295,6 @@
                 <recordcc destdir="${project.build.directory}/generated-test-sources/java">
                   <fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
                 </recordcc>
-
-                <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
-                  <classpath refid="maven.test.classpath"/>
-                </taskdef>
-                <schema destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test">
-                    <include name="**/*.avsc"/>
-                  </fileset>
-                </schema>
-
-                <taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
-                  <classpath refid="maven.test.classpath"/>
-                </taskdef>
-                <schema destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test">
-                    <include name="**/*.avpr"/>
-                  </fileset>
-                </schema>
               </target>
             </configuration>
           </execution>
@@ -433,17 +351,6 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-checkstyle-plugin</artifactId>
-        <configuration>
-          <configLocation>file://${basedir}/dev-support/checkstyle.xml</configLocation>
-          <failOnViolation>false</failOnViolation>
-          <format>xml</format>
-          <format>html</format>
-          <outputFile>${project.build.directory}/test/checkstyle-errors.xml</outputFile>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
@@ -463,43 +370,6 @@
           </excludes>
         </configuration>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-javadoc-plugin</artifactId>
-        <configuration>
-          <linksource>true</linksource>
-          <quiet>true</quiet>
-          <verbose>false</verbose>
-          <source>${maven.compile.source}</source>
-          <charset>${maven.compile.encoding}</charset>
-          <reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
-          <destDir>api</destDir>
-          <groups>
-            <group>
-              <title>${project.name} API</title>
-              <packages>org.apache.hadoop*</packages>
-            </group>
-          </groups>
-          <doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
-          <docletArtifacts>
-            <docletArtifact>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-annotations</artifactId>
-              <version>${project.version}</version>
-            </docletArtifact>
-          </docletArtifacts>
-          <useStandardDocletOptions>true</useStandardDocletOptions>
-
-          <!-- switch on dependency-driven aggregation -->
-          <includeDependencySources>true</includeDependencySources>
-
-          <dependencySourceIncludes>
-            <!-- include ONLY dependencies I control -->
-            <dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
-          </dependencySourceIncludes>
-
-        </configuration>
-      </plugin>
     </plugins>
   </build>
 
@@ -636,427 +506,5 @@
         </plugins>
       </build>
     </profile>
-
-    <profile>
-      <id>docs</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <properties>
-        <jdiff.stable.api>0.20.2</jdiff.stable.api>
-        <jdiff.stability>-unstable</jdiff.stability>
-        <jdiff.compatibility></jdiff.compatibility>
-        <jdiff.javadoc.maxmemory>512m</jdiff.javadoc.maxmemory>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-javadoc-plugin</artifactId>
-            <executions>
-              <execution>
-                <goals>
-                  <goal>javadoc</goal>
-                </goals>
-                <phase>prepare-package</phase>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>findbugs-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <goals>
-                  <goal>findbugs</goal>
-                </goals>
-                <phase>prepare-package</phase>
-              </execution>
-            </executions>
-            <configuration>
-              <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>site</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>copy</goal>
-                </goals>
-                <configuration>
-                  <artifactItems>
-                    <artifactItem>
-                      <groupId>jdiff</groupId>
-                      <artifactId>jdiff</artifactId>
-                      <version>${jdiff.version}</version>
-                      <overWrite>false</overWrite>
-                      <outputDirectory>${project.build.directory}</outputDirectory>
-                      <destFileName>jdiff.jar</destFileName>
-                    </artifactItem>
-                    <artifactItem>
-                      <groupId>org.apache.hadoop</groupId>
-                      <artifactId>hadoop-annotations</artifactId>
-                      <version>${hadoop.annotations.version}</version>
-                      <overWrite>false</overWrite>
-                      <outputDirectory>${project.build.directory}</outputDirectory>
-                      <destFileName>hadoop-annotations.jar</destFileName>
-                    </artifactItem>
-                  </artifactItems>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>site</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-
-                    <mkdir dir="${project.build.directory}/docs-src"/>
-
-                    <copy todir="${project.build.directory}/docs-src">
-                      <fileset dir="${basedir}/src/main/docs"/>
-                    </copy>
-
-                    <!-- Docs -->
-                    <exec dir="${project.build.directory}/docs-src"
-                          executable="${env.FORREST_HOME}/bin/forrest"
-                          failonerror="true">
-                    </exec>
-                    <copy todir="${project.build.directory}/site">
-                      <fileset dir="${project.build.directory}/docs-src/build/site"/>
-                    </copy>
-                    <copy file="${project.build.directory}/docs-src/releasenotes.html"
-                          todir="${project.build.directory}/site"/>
-                    <style basedir="${basedir}/src/main/resources"
-                           destdir="${project.build.directory}/site"
-                           includes="core-default.xml"
-                           style="${basedir}/src/main/xsl/configuration.xsl"/>
-
-                    <!-- Convert 'CHANGES.txt' to 'changes.html" -->
-                    <exec executable="perl" input="${basedir}/../CHANGES.txt"
-                          output="${project.build.directory}/site/changes.html"
-                          failonerror="true">
-                      <arg value="${project.build.directory}/docs-src/changes/changes2html.pl"/>
-                    </exec>
-                    <copy todir="${project.build.directory}/site">
-                      <fileset dir="${project.build.directory}/docs-src/changes" includes="*.css"/>
-                    </copy>
-
-                    <!-- Jdiff -->
-                    <mkdir dir="${project.build.directory}/site/jdiff/xml"/>
-
-                    <javadoc maxmemory="${jdiff.javadoc.maxmemory}" verbose="yes">
-                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
-                        <param name="-apidir" value="${project.build.directory}/site/jdiff/xml"/>
-                        <param name="-apiname" value="hadoop-core ${project.version}"/>
-                        <param name="${jdiff.stability}"/>
-                      </doclet>
-                      <packageset dir="${basedir}/src/main/java"/>
-                      <classpath>
-                        <path refid="maven.compile.classpath"/>
-                      </classpath>
-                    </javadoc>
-                    <javadoc sourcepath="${basedir}/src/main/java"
-                             destdir="${project.build.directory}/site/jdiff/xml"
-                             sourceFiles="${basedir}/dev-support/jdiff/Null.java"
-                             maxmemory="${jdiff.javadoc.maxmemory}">
-                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
-                        <param name="-oldapi" value="hadoop-core ${jdiff.stable.api}"/>
-                        <param name="-newapi" value="hadoop-core ${project.version}"/>
-                        <param name="-oldapidir" value="${basedir}/dev-support/jdiff"/>
-                        <param name="-newapidir" value="${project.build.directory}/site/jdiff/xml"/>
-                        <param name="-javadocold"
-                               value="http://hadoop.apache.org/docs/${jdiff.stable.api}/api/"/>
-                        <param name="-javadocnew" value="${project.build.directory}/site/api"/>
-                        <param name="-stats"/>
-                        <param name="${jdiff.stability}"/>
-                        <param name="${jdiff.compatibility}"/>
-                      </doclet>
-                      <classpath>
-                        <path refid="maven.compile.classpath"/>
-                      </classpath>
-                    </javadoc>
-
-                    <xslt style="${env.FINDBUGS_HOME}/src/xsl/default.xsl"
-                          in="${project.build.directory}/findbugsXml.xml"
-                          out="${project.build.directory}/site/findbugs.html"/>
-
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>src</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-tar-src</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-src</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>tar</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-tar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-copynativelibs.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      TAR='tar cf -'
-                      UNTAR='tar xfBp -'
-                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
-                      if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
-                      mkdir -p $${TARGET_DIR}
-                      cd $${LIB_DIR}
-                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      if [ "${bundle.snappy}" = "true" ] ; then
-                      cd ${snappy.lib}
-                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      fi
-                      fi
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-copynativelibs.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>tar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-maketar.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-tar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-tar</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>bintar</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-bintar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/bintar-copynativelibs.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      TAR='tar cf -'
-                      UNTAR='tar xfBp -'
-                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
-                      if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}-bin/lib"
-                      mkdir -p $${TARGET_DIR}
-                      cd $${LIB_DIR}
-                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      if [ "${bundle.snappy}" = "true" ] ; then
-                      cd ${snappy.lib}
-                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      fi
-                      fi
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./bintar-copynativelibs.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>bintar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/bintar-maketar.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}-bin.tar.gz ${project.artifactId}-${project.version}-bin
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./bintar-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-bintar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}-bin</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-bintar</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
   </profiles>
 </project>

+ 111 - 0
hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java

@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/** A WritableComparable for shorts. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class ShortWritable implements WritableComparable<ShortWritable> {
+  private short value;
+
+  public ShortWritable() {
+  }
+
+  public ShortWritable(short value) {
+    set(value);
+  }
+
+  /** Set the value of this ShortWritable. */
+  public void set(short value) {
+    this.value = value;
+  }
+
+  /** Return the value of this ShortWritable. */
+  public short get() {
+    return value;
+  }
+
+  /** read the short value */
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    value = in.readShort();
+  }
+
+  /** write short value */
+  @Override
+  public void write(DataOutput out) throws IOException {
+    out.writeShort(value);
+  }
+
+  /** Returns true iff <code>o</code> is a ShortWritable with the same value. */
+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof ShortWritable))
+      return false;
+    ShortWritable other = (ShortWritable) o;
+    return this.value == other.value;
+  }
+
+  /** hash code */
+  @Override
+  public int hashCode() {
+    return value;
+  }
+
+  /** Compares two ShortWritable. */
+  @Override
+  public int compareTo(ShortWritable o) {
+    short thisValue = this.value;
+    short thatValue = (o).value;
+    return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+  }
+
+  /** Short values in string format */
+  @Override
+  public String toString() {
+    return Short.toString(value);
+  }
+
+  /** A Comparator optimized for ShortWritable. */
+  public static class Comparator extends WritableComparator {
+
+    public Comparator() {
+      super(ShortWritable.class);
+    }
+    
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+      short thisValue = (short) readUnsignedShort(b1, s1);
+      short thatValue = (short) readUnsignedShort(b2, s2);
+      return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+    }
+  }
+
+  static { // register this comparator
+    WritableComparator.define(ShortWritable.class, new Comparator());
+  }
+
+}
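
The new class above follows the standard Writable round-trip contract: write() serializes to a DataOutput, readFields() restores from a DataInput, and the static block registers a raw-byte Comparator. A minimal usage sketch (illustrative only, not part of this commit; the demo class name is hypothetical):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.ShortWritable;

    public class ShortWritableRoundTrip {
      public static void main(String[] args) throws IOException {
        ShortWritable before = new ShortWritable((short) 256);

        // Serialize via the Writable contract shown above.
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        before.write(new DataOutputStream(buf));

        // Deserialize into a fresh instance and compare.
        ShortWritable after = new ShortWritable();
        after.readFields(
            new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(before.equals(after)); // true
      }
    }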

+ 3 - 2
hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java

@@ -28,6 +28,7 @@ import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DatumReader;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configured;
@@ -93,7 +94,7 @@ public abstract class AvroSerialization<T> extends Configured
     @Override
     public void open(OutputStream out) throws IOException {
       outStream = out;
-      encoder = new BinaryEncoder(out);
+      encoder = EncoderFactory.get().binaryEncoder(out, encoder);
     }
 
     @Override
@@ -127,7 +128,7 @@ public abstract class AvroSerialization<T> extends Configured
     @Override
     public void open(InputStream in) throws IOException {
       inStream = in;
-      decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
+      decoder = DecoderFactory.get().binaryDecoder(in, decoder);
     }
 
   }
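
The hunks above track the Avro 1.4 to 1.5 API change: encoders and decoders are no longer constructed directly but obtained from EncoderFactory/DecoderFactory, which can reuse a previously returned instance (hence passing the old encoder/decoder as the second argument). A standalone sketch of the new pattern, assuming Avro 1.5.x on the classpath (illustrative only, not part of this commit):

    import java.io.ByteArrayOutputStream;

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;

    public class AvroFactoryRoundTrip {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        // Passing null (or a previous encoder) lets the factory reuse buffers.
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        encoder.writeString("hello");
        encoder.flush(); // binaryEncoder() is buffered; flush before reading

        BinaryDecoder decoder =
            DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        System.out.println(decoder.readString(null)); // hello
      }
    }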

+ 3 - 3
hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java

@@ -34,9 +34,9 @@ import javax.net.SocketFactory;
 
 import org.apache.avro.ipc.Responder;
 import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.reflect.ReflectRequestor;
-import org.apache.avro.reflect.ReflectResponder;
-import org.apache.avro.specific.SpecificRequestor;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceStability;

+ 2 - 2
hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 
 import org.apache.avro.ipc.Responder;
 import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.specific.SpecificRequestor;
-import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
 import org.apache.hadoop.classification.InterfaceStability;
 
 /**

+ 0 - 0
hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroSpecificTestProtocol.avpr → hadoop-common/src/test/avro/AvroSpecificTestProtocol.avpr


+ 0 - 0
hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/avroRecord.avsc → hadoop-common/src/test/avro/avroRecord.avsc


+ 6 - 5
hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java

@@ -18,15 +18,16 @@
 
 package org.apache.hadoop.io;
 
-import java.io.IOException;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.lang.reflect.Type;
 
 import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.reflect.ReflectDatumWriter;
 import org.apache.avro.reflect.ReflectDatumReader;
-import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DecoderFactory;
 
 import static junit.framework.TestCase.assertEquals;
@@ -47,11 +48,11 @@ public class AvroTestUtil {
     // check that value is serialized correctly
     ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
-    writer.write(value, new BinaryEncoder(out));
+    writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
     ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
     Object after =
-      reader.read(null, DecoderFactory.defaultFactory().createBinaryDecoder(
-          out.toByteArray(), null));
+      reader.read(null,
+                  DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
     assertEquals(value, after);
   }
 

+ 35 - 4
hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java

@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.util.Random;
 
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -68,6 +69,10 @@ public class TestWritable extends TestCase {
   public void testByteWritable() throws Exception {
     testWritable(new ByteWritable((byte)128));
   }
+  
+  public void testShortWritable() throws Exception {
+    testWritable(new ShortWritable((byte)256));
+  }
 
   public void testDoubleWritable() throws Exception {
     testWritable(new DoubleWritable(1.0));
@@ -104,13 +109,13 @@ public class TestWritable extends TestCase {
     }
   }
 
-  private static class Frob implements WritableComparable {
+  private static class Frob implements WritableComparable<Frob> {
     static {                                     // register default comparator
       WritableComparator.define(Frob.class, new FrobComparator());
     }
     @Override public void write(DataOutput out) throws IOException {}
     @Override public void readFields(DataInput in) throws IOException {}
-    @Override public int compareTo(Object o) { return 0; }
+    @Override public int compareTo(Frob o) { return 0; }
   }
 
   /** Test that comparator is defined. */
@@ -118,5 +123,31 @@ public class TestWritable extends TestCase {
     assert(WritableComparator.get(Frob.class) instanceof FrobComparator);
   }
 
+  /**
+   * Test a user comparator that relies on deserializing both arguments for each
+   * compare.
+   */
+  public void testShortWritableComparator() throws Exception {
+    ShortWritable writable1 = new ShortWritable((short)256);
+    ShortWritable writable2 = new ShortWritable((short) 128);
+    ShortWritable writable3 = new ShortWritable((short) 256);
+    
+    final String SHOULD_NOT_MATCH_WITH_RESULT_ONE = "Result should be 1, should not match the writables";
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE,
+        writable1.compareTo(writable2) == 1);
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE, WritableComparator.get(
+        ShortWritable.class).compare(writable1, writable2) == 1);
+
+    final String SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE = "Result should be -1, should not match the writables";
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, writable2
+        .compareTo(writable1) == -1);
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, WritableComparator.get(
+        ShortWritable.class).compare(writable2, writable1) == -1);
+
+    final String SHOULD_MATCH = "Result should be 0, should match the writables";
+    assertTrue(SHOULD_MATCH, writable1.compareTo(writable1) == 0);
+    assertTrue(SHOULD_MATCH, WritableComparator.get(ShortWritable.class)
+        .compare(writable1, writable3) == 0);
+  }
 
 }

+ 1 - 1
hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.ipc;
 
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
 
 @SuppressWarnings("serial")
 public interface AvroTestProtocol {

+ 3 - 3
hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java

@@ -28,7 +28,7 @@ import javax.security.sasl.Sasl;
 import junit.framework.Assert;
 import junit.framework.TestCase;
 
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
 import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -189,7 +189,7 @@ public class TestAvroRpc extends TestCase {
         (AvroSpecificTestProtocol)RPC.getProxy(AvroSpecificTestProtocol.class, 
             0, addr, conf);
       
-      Utf8 echo = proxy.echo(new Utf8("hello world"));
+      CharSequence echo = proxy.echo("hello world");
       assertEquals("hello world", echo.toString());
 
       int intResult = proxy.add(1, 2);
@@ -210,7 +210,7 @@ public class TestAvroRpc extends TestCase {
     }
 
     @Override
-    public Utf8 echo(Utf8 msg) throws AvroRemoteException {
+    public CharSequence echo(CharSequence msg) throws AvroRemoteException {
       return msg;
     }
     
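The Utf8-to-CharSequence changes above reflect Avro 1.5's generated specific APIs, which declare string parameters and results as CharSequence; at runtime the concrete type is typically still Utf8, which is why the test compares via toString(). A hypothetical interface in that style (illustrative only, not generated from this commit's .avpr):

    // Hypothetical Avro-1.5-style specific interface (not part of this commit).
    public interface EchoProtocol {
      CharSequence echo(CharSequence msg);
    }

    class EchoImpl implements EchoProtocol {
      @Override
      public CharSequence echo(CharSequence msg) {
        return msg; // over the wire, Avro may hand back a Utf8 instance
      }

      public static void main(String[] args) {
        CharSequence echoed = new EchoImpl().echo("hello world");
        // Compare via toString(): the concrete CharSequence may not be String.
        System.out.println("hello world".equals(echoed.toString())); // true
      }
    }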

+ 0 - 0
hdfs/CHANGES.HDFS-1623.txt → hadoop-hdfs/CHANGES.HDFS-1623.txt


+ 11 - 0
hdfs/CHANGES.txt → hadoop-hdfs/CHANGES.txt

@@ -665,6 +665,14 @@ Trunk (unreleased changes)
 
     HDFS-2233. Add WebUI tests with URI reserved chars. (eli)
 
+    HDFS-2265. Remove unnecessary BlockTokenSecretManager fields/methods from
+    BlockManager.  (szetszwo)
+
+    HDFS-2260. Refactor BlockReader into an interface and implementation.
+    (todd)
+
+    HDFS-2096. Mavenization of hadoop-hdfs (Alejandro Abdelnur via tomwhite)
+
   OPTIMIZATIONS
 
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image
@@ -969,6 +977,9 @@ Trunk (unreleased changes)
     HDFS-73. DFSOutputStream does not close all the sockets.
     (Uma Maheswara Rao G via eli)
 
+    HDFS-1257. Fix a race condition on BlockManager.recentInvalidateSets.
+    (Eric Payne via szetszwo)
+
   BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.

+ 0 - 0
hdfs/LICENSE.txt → hadoop-hdfs/LICENSE.txt


+ 0 - 0
hdfs/NOTICE.txt → hadoop-hdfs/NOTICE.txt


+ 0 - 0
hdfs/src/test/all-tests → hadoop-hdfs/dev-support/all-tests


+ 0 - 0
hdfs/src/test/checkstyle-noframes-sorted.xsl → hadoop-hdfs/dev-support/checkstyle-noframes-sorted.xsl


+ 3 - 3
hdfs/src/test/checkstyle.xml → hadoop-hdfs/dev-support/checkstyle.xml

@@ -35,7 +35,7 @@
 
     <!-- Checks that a package.html file exists for each package.     -->
     <!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
-    <module name="PackageHtml"/>
+    <module name="JavadocPackage"/>
 
     <!-- Checks whether files end with a new line.                        -->
     <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
@@ -45,6 +45,8 @@
     <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
     <module name="Translation"/>
 
+    <module name="FileLength"/>
+    <module name="FileTabCharacter"/>
 
     <module name="TreeWalker">
 
@@ -96,7 +98,6 @@
 
         <!-- Checks for Size Violations.                    -->
         <!-- See http://checkstyle.sf.net/config_sizes.html -->
-        <module name="FileLength"/>
         <module name="LineLength"/>
         <module name="MethodLength"/>
         <module name="ParameterNumber"/>
@@ -110,7 +111,6 @@
         <module name="NoWhitespaceBefore"/>
         <module name="ParenPad"/>
         <module name="TypecastParenPad"/>
-        <module name="TabCharacter"/>
         <module name="WhitespaceAfter">
 	    	<property name="tokens" value="COMMA, SEMI"/>
 		</module>

+ 0 - 0
hdfs/src/test/commit-tests → hadoop-hdfs/dev-support/commit-tests


+ 0 - 0
hdfs/src/test/findbugsExcludeFile.xml → hadoop-hdfs/dev-support/findbugsExcludeFile.xml


+ 0 - 0
hdfs/lib/jdiff/hadoop-hdfs_0.20.0.xml → hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.20.0.xml


+ 0 - 0
hdfs/lib/jdiff/hadoop-hdfs_0.21.0.xml → hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.21.0.xml


+ 0 - 0
hdfs/src/test/smoke-tests → hadoop-hdfs/dev-support/smoke-tests


+ 3 - 0
hdfs/src/test/test-patch.properties → hadoop-hdfs/dev-support/test-patch.properties

@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The number of acceptable warning for this module
+# Please update the root test-patch.properties if you update this file.
+
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
 OK_JAVADOC_WARNINGS=0

+ 411 - 0
hadoop-hdfs/pom.xml

@@ -0,0 +1,411 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project-distro</artifactId>
+    <version>0.23.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project-distro</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-hdfs</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <description>Apache Hadoop HDFS</description>
+  <name>Apache Hadoop HDFS</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdfs</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.aspectj</groupId>
+      <artifactId>aspectjtools</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.aspectj</groupId>
+      <artifactId>aspectjrt</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-daemon</groupId>
+      <artifactId>commons-daemon</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ant</groupId>
+      <artifactId>ant</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.jcraft</groupId>
+      <artifactId>jsch</artifactId>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo.jspc</groupId>
+        <artifactId>jspc-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>hdfs</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <compile>false</compile>
+              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <webFragmentFile>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webFragmentFile>
+              <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
+              <sources>
+                <directory>${basedir}/src/main/webapps/hdfs</directory>
+                <includes>
+                  <include>*.jsp</include>
+                </includes>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>secondary</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <compile>false</compile>
+              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <webFragmentFile>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webFragmentFile>
+              <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
+              <sources>
+                <directory>${basedir}/src/main/webapps/secondary</directory>
+                <includes>
+                  <include>*.jsp</include>
+                </includes>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>datanode</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <compile>false</compile>
+              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <webFragmentFile>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webFragmentFile>
+              <packageName>org.apache.hadoop.hdfs.server.datanode</packageName>
+              <sources>
+                <directory>${basedir}/src/main/webapps/datanode</directory>
+                <includes>
+                  <include>*.jsp</include>
+                </includes>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+        <dependencies>
+          <dependency>
+            <groupId>org.codehaus.mojo.jspc</groupId>
+            <artifactId>jspc-compiler-tomcat5</artifactId>
+            <version>2.0-alpha-3</version>
+          </dependency>
+          <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.4.1</version>
+          </dependency>
+          <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl104-over-slf4j</artifactId>
+            <version>1.4.1</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-src/main/jsp</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>create-web-xmls</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <loadfile property="hdfs.servlet.definitions" srcFile="${project.build.directory}/hdfs-jsp-servlet-definitions.xml"/>
+                <loadfile property="secondary.servlet.definitions" srcFile="${project.build.directory}/secondary-jsp-servlet-definitions.xml"/>
+                <loadfile property="datanode.servlet.definitions" srcFile="${project.build.directory}/datanode-jsp-servlet-definitions.xml"/>
+                <echoproperties destfile="${project.build.directory}/webxml.properties">
+                  <propertyset>
+                    <propertyref regex=".*.servlet.definitions"/>
+                  </propertyset>
+                </echoproperties>
+                <filter filtersfile="${project.build.directory}/webxml.properties"/>
+                <copy file="${basedir}/src/main/webapps/proto-hdfs-web.xml"
+                      tofile="${project.build.directory}/webapps/hdfs/WEB-INF/web.xml"
+                      filtering="true"/>
+                <copy file="${basedir}/src/main/webapps/proto-secondary-web.xml"
+                      tofile="${project.build.directory}/webapps/secondary/WEB-INF/web.xml"
+                      filtering="true"/>
+                <copy file="${basedir}/src/main/webapps/proto-datanode-web.xml"
+                      tofile="${project.build.directory}/webapps/datanode/WEB-INF/web.xml"
+                      filtering="true"/>
+                <copy toDir="${project.build.directory}/webapps">
+                  <fileset dir="${basedir}/src/main/webapps">
+                    <exclude name="**/*.jsp"/>
+                    <exclude name="**/proto-*-web.xml"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>create-log-dir</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <delete dir="${test.build.data}"/>
+                <mkdir dir="${hadoop.log.dir}"/>
+
+                <copy todir="${project.build.directory}/test-classes/webapps">
+                  <fileset dir="${project.build.directory}/webapps">
+                    <exclude name="proto-*-web.xml"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>CHANGES.txt</exclude>
+            <exclude>CHANGES.HDFS-1623.txt</exclude>
+            <exclude>.idea/**</exclude>
+            <exclude>src/main/conf/*</exclude>
+            <exclude>src/main/docs/**</exclude>
+            <exclude>dev-support/findbugsExcludeFile.xml</exclude>
+            <exclude>dev-support/checkstyle*</exclude>
+            <exclude>dev-support/jdiff/**</exclude>
+            <exclude>dev-support/*tests</exclude>
+            <exclude>src/main/native/*</exclude>
+            <exclude>src/main/native/config/*</exclude>
+            <exclude>src/main/native/m4/*</exclude>
+            <exclude>src/test/empty-file</exclude>
+            <exclude>src/test/all-tests</exclude>
+            <exclude>src/test/resources/*.tgz</exclude>
+            <exclude>src/test/resources/data*</exclude>
+            <exclude>src/test/resources/editStored*</exclude>
+            <exclude>src/test/resources/empty-file</exclude>
+            <exclude>src/main/webapps/datanode/robots.txt</exclude>
+            <exclude>src/main/docs/releasenotes.html</exclude>
+            <exclude>src/contrib/**</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>native</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <copy toDir="${project.build.directory}/native">
+                      <fileset dir="${basedir}/src/main/native"/>
+                    </copy>
+                    <mkdir dir="${project.build.directory}/native/m4"/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>make-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>autoreconf</goal>
+                  <goal>configure</goal>
+                  <goal>make-install</goal>
+                </goals>
+                <configuration>
+                  <!-- autoreconf settings -->
+                  <workDir>${project.build.directory}/native</workDir>
+                  <arguments>
+                    <argument>-i</argument>
+                    <argument>-f</argument>
+                  </arguments>
+
+                  <!-- configure settings -->
+                  <configureEnvironment>
+                    <property>
+                      <name>ac_cv_func_malloc_0_nonnull</name>
+                      <value>yes</value>
+                    </property>
+                    <property>
+                      <name>JVM_ARCH</name>
+                      <value>${sun.arch.data.model}</value>
+                    </property>
+                  </configureEnvironment>
+                  <configureOptions>
+                  </configureOptions>
+                  <configureWorkDir>${project.build.directory}/native</configureWorkDir>
+                  <prefix>/usr/local</prefix>
+
+                  <!-- make settings -->
+                  <installEnvironment>
+                    <property>
+                      <name>ac_cv_func_malloc_0_nonnull</name>
+                      <value>yes</value>
+                    </property>
+                    <property>
+                      <name>JVM_ARCH</name>
+                      <value>${sun.arch.data.model}</value>
+                    </property>
+                  </installEnvironment>
+
+                  <!-- configure & make settings -->
+                  <destDir>${project.build.directory}/native/target</destDir>
+
+                </configuration>
+              </execution>
+              
+              <!-- TODO wire here native testcases
+              <execution>
+                <id>test</id>
+                <phase>test</phase>
+                <goals>
+                  <goal>test</goal>
+                </goals>
+                <configuration>
+                  <destDir>${project.build.directory}/native/target</destDir>
+                </configuration>
+              </execution>
+              -->
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>

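With HDFS mavenized, downstream builds can consume it as an ordinary artifact. A consumer's dependency stanza would look like this, illustrative but using the coordinates declared in the pom above:

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>0.23.0-SNAPSHOT</version>
    </dependency>

Note that the native profile above is inactive by default, so the native build would be requested explicitly, e.g. with mvn install -Pnative.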
+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/DfsTask.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/DfsTask.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/antlib.xml → hadoop-hdfs/src/ant/org/apache/hadoop/ant/antlib.xml


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsExists.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsExists.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java


+ 0 - 0
hdfs/src/contrib/build-contrib.xml → hadoop-hdfs/src/contrib/build-contrib.xml


+ 0 - 0
hdfs/src/contrib/build.xml → hadoop-hdfs/src/contrib/build.xml


+ 0 - 0
hdfs/src/contrib/fuse-dfs/Makefile.am → hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am


+ 0 - 0
hdfs/src/contrib/fuse-dfs/README → hadoop-hdfs/src/contrib/fuse-dfs/README


+ 0 - 0
hdfs/src/contrib/fuse-dfs/acinclude.m4 → hadoop-hdfs/src/contrib/fuse-dfs/acinclude.m4


+ 0 - 0
hdfs/src/contrib/fuse-dfs/build.xml → hadoop-hdfs/src/contrib/fuse-dfs/build.xml


+ 0 - 0
hdfs/src/contrib/fuse-dfs/configure.ac → hadoop-hdfs/src/contrib/fuse-dfs/configure.ac


+ 0 - 0
hdfs/src/contrib/fuse-dfs/global_footer.mk → hadoop-hdfs/src/contrib/fuse-dfs/global_footer.mk


+ 0 - 0
hdfs/src/contrib/fuse-dfs/global_header.mk → hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk


+ 0 - 0
hdfs/src/contrib/fuse-dfs/ivy.xml → hadoop-hdfs/src/contrib/fuse-dfs/ivy.xml


+ 0 - 0
hdfs/src/contrib/fuse-dfs/ivy/libraries.properties → hadoop-hdfs/src/contrib/fuse-dfs/ivy/libraries.properties


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/Makefile.am → hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_connect.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_connect.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_context_handle.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_context_handle.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_dfs.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_file_handle.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_file_handle.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_access.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_access.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_chmod.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chmod.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_chown.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chown.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_create.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_create.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_flush.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_flush.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_getattr.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_getattr.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_mkdir.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mkdir.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_mknod.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mknod.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_open.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_open.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_read.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_read.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_readdir.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_readdir.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_release.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_release.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_rename.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rename.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_rmdir.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rmdir.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_statfs.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_statfs.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_symlink.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_symlink.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_unlink.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_unlink.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_utimens.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_utimens.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_write.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_write.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_init.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_init.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_options.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_options.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_trash.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_trash.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_users.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_users.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/test/TestFuseDFS.java → hadoop-hdfs/src/contrib/fuse-dfs/src/test/TestFuseDFS.java


+ 0 - 0
hdfs/bin/distribute-exclude.sh → hadoop-hdfs/src/main/bin/distribute-exclude.sh


+ 0 - 0
hdfs/bin/hdfs → hadoop-hdfs/src/main/bin/hdfs


+ 0 - 0
hdfs/bin/hdfs-config.sh → hadoop-hdfs/src/main/bin/hdfs-config.sh


+ 0 - 0
hdfs/bin/refresh-namenodes.sh → hadoop-hdfs/src/main/bin/refresh-namenodes.sh


+ 0 - 0
hdfs/bin/start-balancer.sh → hadoop-hdfs/src/main/bin/start-balancer.sh


+ 0 - 0
hdfs/bin/start-dfs.sh → hadoop-hdfs/src/main/bin/start-dfs.sh


+ 0 - 0
hdfs/bin/start-secure-dns.sh → hadoop-hdfs/src/main/bin/start-secure-dns.sh


+ 0 - 0
hdfs/bin/stop-balancer.sh → hadoop-hdfs/src/main/bin/stop-balancer.sh


+ 0 - 0
hdfs/bin/stop-dfs.sh → hadoop-hdfs/src/main/bin/stop-dfs.sh


+ 0 - 0
hdfs/bin/stop-secure-dns.sh → hadoop-hdfs/src/main/bin/stop-secure-dns.sh


+ 17 - 0
hdfs/conf/hadoop-metrics2.properties → hadoop-hdfs/src/main/conf/hadoop-metrics2.properties

@@ -1,3 +1,20 @@
+#
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+
 # syntax: [prefix].[source|sink].[instance].[options]
 # See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
 

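The syntax comment in that file is terse; an illustrative sink definition following the [prefix].[sink].[instance].[options] pattern (example values, not part of this change) would be:

    namenode.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
    namenode.sink.file.filename=namenode-metrics.out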
+ 21 - 0
hadoop-hdfs/src/main/conf/hdfs-site.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+</configuration>

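The new skeleton ships empty by design; site-specific overrides go between the configuration tags, for example (hypothetical value, not part of this commit):

    <property>
      <name>dfs.replication</name>
      <value>2</value>
    </property>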
Some files were not shown because too many files changed in this diff