فهرست منبع

HDFS-2096. Mavenization of hadoop-hdfs. Contributed by Alejandro Abdelnur.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1159702 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 13 سال پیش
والد
کامیت
d86f3183d9
100 فایل‌های تغییر یافته به همراه 482 افزوده شده و 576 حذف شده
  1. 5 2
      BUILDING.txt
  2. 3 3
      dev-support/smart-apply-patch.sh
  3. 6 6
      hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml
  4. 13 3
      hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml
  5. 3 559
      hadoop-common/pom.xml
  6. 2 0
      hadoop-hdfs/CHANGES.txt
  7. 0 0
      hadoop-hdfs/LICENSE.txt
  8. 0 0
      hadoop-hdfs/NOTICE.txt
  9. 0 0
      hadoop-hdfs/dev-support/all-tests
  10. 0 0
      hadoop-hdfs/dev-support/checkstyle-noframes-sorted.xsl
  11. 3 3
      hadoop-hdfs/dev-support/checkstyle.xml
  12. 0 0
      hadoop-hdfs/dev-support/commit-tests
  13. 0 0
      hadoop-hdfs/dev-support/findbugsExcludeFile.xml
  14. 0 0
      hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.20.0.xml
  15. 0 0
      hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.21.0.xml
  16. 0 0
      hadoop-hdfs/dev-support/smoke-tests
  17. 3 0
      hadoop-hdfs/dev-support/test-patch.properties
  18. 406 0
      hadoop-hdfs/pom.xml
  19. 0 0
      hadoop-hdfs/src/ant/org/apache/hadoop/ant/DfsTask.java
  20. 0 0
      hadoop-hdfs/src/ant/org/apache/hadoop/ant/antlib.xml
  21. 0 0
      hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java
  22. 0 0
      hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsExists.java
  23. 0 0
      hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java
  24. 0 0
      hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java
  25. 0 0
      hadoop-hdfs/src/contrib/build-contrib.xml
  26. 0 0
      hadoop-hdfs/src/contrib/build.xml
  27. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
  28. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/README
  29. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/acinclude.m4
  30. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/build.xml
  31. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
  32. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/global_footer.mk
  33. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk
  34. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/ivy.xml
  35. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/ivy/libraries.properties
  36. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
  37. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.c
  38. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.h
  39. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_context_handle.h
  40. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.c
  41. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h
  42. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh
  43. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_file_handle.h
  44. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls.h
  45. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_access.c
  46. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chmod.c
  47. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chown.c
  48. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_create.c
  49. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_flush.c
  50. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_getattr.c
  51. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mkdir.c
  52. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mknod.c
  53. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_open.c
  54. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_read.c
  55. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_readdir.c
  56. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_release.c
  57. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rename.c
  58. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rmdir.c
  59. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_statfs.c
  60. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_symlink.c
  61. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c
  62. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_unlink.c
  63. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_utimens.c
  64. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_write.c
  65. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.c
  66. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.h
  67. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.c
  68. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.h
  69. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.c
  70. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.h
  71. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.c
  72. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.h
  73. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.c
  74. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.h
  75. 0 0
      hadoop-hdfs/src/contrib/fuse-dfs/src/test/TestFuseDFS.java
  76. 0 0
      hadoop-hdfs/src/main/bin/distribute-exclude.sh
  77. 0 0
      hadoop-hdfs/src/main/bin/hdfs
  78. 0 0
      hadoop-hdfs/src/main/bin/hdfs-config.sh
  79. 0 0
      hadoop-hdfs/src/main/bin/refresh-namenodes.sh
  80. 0 0
      hadoop-hdfs/src/main/bin/start-balancer.sh
  81. 0 0
      hadoop-hdfs/src/main/bin/start-dfs.sh
  82. 0 0
      hadoop-hdfs/src/main/bin/start-secure-dns.sh
  83. 0 0
      hadoop-hdfs/src/main/bin/stop-balancer.sh
  84. 0 0
      hadoop-hdfs/src/main/bin/stop-dfs.sh
  85. 0 0
      hadoop-hdfs/src/main/bin/stop-secure-dns.sh
  86. 17 0
      hadoop-hdfs/src/main/conf/hadoop-metrics2.properties
  87. 21 0
      hadoop-hdfs/src/main/conf/hdfs-site.xml
  88. 0 0
      hadoop-hdfs/src/main/docs/changes/ChangesFancyStyle.css
  89. 0 0
      hadoop-hdfs/src/main/docs/changes/ChangesSimpleStyle.css
  90. 0 0
      hadoop-hdfs/src/main/docs/changes/changes2html.pl
  91. 0 0
      hadoop-hdfs/src/main/docs/forrest.properties
  92. 0 0
      hadoop-hdfs/src/main/docs/releasenotes.html
  93. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/README.txt
  94. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/classes/CatalogManager.properties
  95. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/conf/cli.xconf
  96. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/SLG_user_guide.xml
  97. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/faultinject_framework.xml
  98. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/hdfs_design.xml
  99. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/hdfs_editsviewer.xml
  100. 0 0
      hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml

+ 5 - 2
hadoop-common/BUILDING.txt → BUILDING.txt

@@ -1,3 +1,6 @@
+
+Build instructions for Hadoop Common/HDFS using Maven
+
 ----------------------------------------------------------------------------------
 Requirements:
 
@@ -16,7 +19,8 @@ Maven modules:
          - hadoop-project     (Parent POM for all Hadoop Maven modules.             )
                               (All plugins & dependencies versions are defined here.)
          - hadoop-annotations (Generates the Hadoop doclet used to generated the Javadocs)
-         - hadoop-common      (Hadoop common)
+         - hadoop-common      (Hadoop Common)
+         - hadoop-hdfs        (Hadoop HDFS)
 
 ----------------------------------------------------------------------------------
 Where to run Maven from?
@@ -57,5 +61,4 @@ Maven build goals:
   * -Dtest.exclude=<TESTCLASSNAME>
   * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
 
-
 ----------------------------------------------------------------------------------

+ 3 - 3
dev-support/smart-apply-patch.sh

@@ -59,15 +59,15 @@ PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
 if [[ -d hadoop-common ]]; then
   echo Looks like this is being run at project root
 
-# if all of the lines start with hadoop-common/, hdfs/, or mapreduce/, this is
+# if all of the lines start with hadoop-common/, hadoop-hdfs/, or mapreduce/, this is
 # relative to the hadoop root instead of the subproject root, so we need
 # to chop off another layer
-elif [[ "$PREFIX_DIRS" =~ ^(hdfs|hadoop-common|mapreduce)$ ]]; then
+elif [[ "$PREFIX_DIRS" =~ ^(hadoop-hdfs|hadoop-common|mapreduce)$ ]]; then
 
   echo Looks like this is relative to project root. Increasing PLEVEL
   PLEVEL=$[$PLEVEL + 1]
 
-elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hdfs\|mapreduce' ; then
+elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hadoop-hdfs\|mapreduce' ; then
   echo Looks like this is a cross-subproject patch. Try applying from the project root
   exit 1
 fi

+ 6 - 6
hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml

@@ -24,9 +24,9 @@
     <fileSet>
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/bin</outputDirectory>
-      <includes>
-        <include>hadoop</include>
-      </includes>
+      <excludes>
+        <exclude>*.sh</exclude>
+      </excludes>
       <fileMode>0755</fileMode>
     </fileSet>
     <fileSet>
@@ -37,7 +37,7 @@
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -68,7 +68,7 @@
       </includes>
     </fileSet>
     <fileSet>
-      <directory>${basedir}/src/main/webapps</directory>
+      <directory>${project.build.directory}/webapps</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
     </fileSet>
     <fileSet>
@@ -101,7 +101,7 @@
     <dependencySet>
       <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
       <unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>

+ 13 - 3
hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml

@@ -40,7 +40,7 @@
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -51,6 +51,16 @@
     <fileSet>
       <directory>${basedir}/src/main/webapps</directory>
       <outputDirectory>/webapps</outputDirectory>
+      <excludes>
+        <exclude>proto-*-web.xml</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/webapps</directory>
+      <outputDirectory>/webapps</outputDirectory>
+      <excludes>
+        <exclude>proto-*-web.xml</exclude>
+      </excludes>
     </fileSet>
     <fileSet>
       <directory>${project.build.directory}/site</directory>
@@ -73,11 +83,11 @@
     <dependencySet>
       <outputDirectory>/lib</outputDirectory>
       <unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>
-        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:*:*:*</exclude>
         <exclude>jdiff:jdiff:jar</exclude>
       </excludes>
     </dependencySet>

+ 3 - 559
hadoop-common/pom.xml

@@ -16,9 +16,9 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-project</artifactId>
+    <artifactId>hadoop-project-distro</artifactId>
     <version>0.23.0-SNAPSHOT</version>
-    <relativePath>../hadoop-project</relativePath>
+    <relativePath>../hadoop-project-distro</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common</artifactId>
@@ -28,18 +28,12 @@
   <packaging>jar</packaging>
 
   <properties>
-    <test.build.data>${project.build.directory}/test/data</test.build.data>
-    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
-    <test.build.webapps>${project.build.directory}/test-classes/webapps</test.build.webapps>
-    <test.cache.data>${project.build.directory}/test-classes</test.cache.data>
-    <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
-
-    <build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
     <snappy.prefix>/usr/local</snappy.prefix>
     <snappy.lib>${snappy.prefix}/lib</snappy.lib>
     <bundle.snappy>false</bundle.snappy>
     
     <hadoop.component>common</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
   </properties>
 
   <dependencies>
@@ -247,86 +241,6 @@
 
   <build>
     <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>always</forkMode>
-          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-          <argLine>-Xmx1024m</argLine>
-          <environmentVariables>
-            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
-          </environmentVariables>
-          <systemPropertyVariables>
-
-            <!-- TODO: all references in testcases should be updated to this default -->
-            <test.build.data>${test.build.data}</test.build.data>
-            <test.build.webapps>${test.build.webapps}</test.build.webapps>
-            <test.cache.data>${test.cache.data}</test.cache.data>
-            <hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
-            <test.build.classes>${test.build.classes}</test.build.classes>
-
-            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
-            <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
-          </systemPropertyVariables>
-          <includes>
-            <include>**/Test*.java</include>
-          </includes>
-          <excludes>
-            <exclude>**/${test.exclude}.java</exclude>
-            <exclude>${test.exclude.pattern}</exclude>
-            <exclude>**/Test*$*.java</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>prepare-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>prepare-test-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>test-jar</goal>
-            </goals>
-            <configuration>
-              <includes>
-                <include>**/*.class</include>
-              </includes>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>jar</goal>
-              <goal>test-jar</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <attach>true</attach>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-        <configuration>
-          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-maven-plugin</artifactId>
@@ -437,17 +351,6 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-checkstyle-plugin</artifactId>
-        <configuration>
-          <configLocation>file://${basedir}/dev-support/checkstyle.xml</configLocation>
-          <failOnViolation>false</failOnViolation>
-          <format>xml</format>
-          <format>html</format>
-          <outputFile>${project.build.directory}/test/checkstyle-errors.xml</outputFile>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
@@ -467,43 +370,6 @@
           </excludes>
         </configuration>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-javadoc-plugin</artifactId>
-        <configuration>
-          <linksource>true</linksource>
-          <quiet>true</quiet>
-          <verbose>false</verbose>
-          <source>${maven.compile.source}</source>
-          <charset>${maven.compile.encoding}</charset>
-          <reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
-          <destDir>api</destDir>
-          <groups>
-            <group>
-              <title>${project.name} API</title>
-              <packages>org.apache.hadoop*</packages>
-            </group>
-          </groups>
-          <doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
-          <docletArtifacts>
-            <docletArtifact>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-annotations</artifactId>
-              <version>${project.version}</version>
-            </docletArtifact>
-          </docletArtifacts>
-          <useStandardDocletOptions>true</useStandardDocletOptions>
-
-          <!-- switch on dependency-driven aggregation -->
-          <includeDependencySources>true</includeDependencySources>
-
-          <dependencySourceIncludes>
-            <!-- include ONLY dependencies I control -->
-            <dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
-          </dependencySourceIncludes>
-
-        </configuration>
-      </plugin>
     </plugins>
   </build>
 
@@ -640,427 +506,5 @@
         </plugins>
       </build>
     </profile>
-
-    <profile>
-      <id>docs</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <properties>
-        <jdiff.stable.api>0.20.2</jdiff.stable.api>
-        <jdiff.stability>-unstable</jdiff.stability>
-        <jdiff.compatibility></jdiff.compatibility>
-        <jdiff.javadoc.maxmemory>512m</jdiff.javadoc.maxmemory>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-javadoc-plugin</artifactId>
-            <executions>
-              <execution>
-                <goals>
-                  <goal>javadoc</goal>
-                </goals>
-                <phase>prepare-package</phase>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>findbugs-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <goals>
-                  <goal>findbugs</goal>
-                </goals>
-                <phase>prepare-package</phase>
-              </execution>
-            </executions>
-            <configuration>
-              <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>site</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>copy</goal>
-                </goals>
-                <configuration>
-                  <artifactItems>
-                    <artifactItem>
-                      <groupId>jdiff</groupId>
-                      <artifactId>jdiff</artifactId>
-                      <version>${jdiff.version}</version>
-                      <overWrite>false</overWrite>
-                      <outputDirectory>${project.build.directory}</outputDirectory>
-                      <destFileName>jdiff.jar</destFileName>
-                    </artifactItem>
-                    <artifactItem>
-                      <groupId>org.apache.hadoop</groupId>
-                      <artifactId>hadoop-annotations</artifactId>
-                      <version>${hadoop.annotations.version}</version>
-                      <overWrite>false</overWrite>
-                      <outputDirectory>${project.build.directory}</outputDirectory>
-                      <destFileName>hadoop-annotations.jar</destFileName>
-                    </artifactItem>
-                  </artifactItems>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>site</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-
-                    <mkdir dir="${project.build.directory}/docs-src"/>
-
-                    <copy todir="${project.build.directory}/docs-src">
-                      <fileset dir="${basedir}/src/main/docs"/>
-                    </copy>
-
-                    <!-- Docs -->
-                    <exec dir="${project.build.directory}/docs-src"
-                          executable="${env.FORREST_HOME}/bin/forrest"
-                          failonerror="true">
-                    </exec>
-                    <copy todir="${project.build.directory}/site">
-                      <fileset dir="${project.build.directory}/docs-src/build/site"/>
-                    </copy>
-                    <copy file="${project.build.directory}/docs-src/releasenotes.html"
-                          todir="${project.build.directory}/site"/>
-                    <style basedir="${basedir}/src/main/resources"
-                           destdir="${project.build.directory}/site"
-                           includes="core-default.xml"
-                           style="${basedir}/src/main/xsl/configuration.xsl"/>
-
-                    <!-- Convert 'CHANGES.txt' to 'changes.html" -->
-                    <exec executable="perl" input="${basedir}/../CHANGES.txt"
-                          output="${project.build.directory}/site/changes.html"
-                          failonerror="true">
-                      <arg value="${project.build.directory}/docs-src/changes/changes2html.pl"/>
-                    </exec>
-                    <copy todir="${project.build.directory}/site">
-                      <fileset dir="${project.build.directory}/docs-src/changes" includes="*.css"/>
-                    </copy>
-
-                    <!-- Jdiff -->
-                    <mkdir dir="${project.build.directory}/site/jdiff/xml"/>
-
-                    <javadoc maxmemory="${jdiff.javadoc.maxmemory}" verbose="yes">
-                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
-                        <param name="-apidir" value="${project.build.directory}/site/jdiff/xml"/>
-                        <param name="-apiname" value="hadoop-core ${project.version}"/>
-                        <param name="${jdiff.stability}"/>
-                      </doclet>
-                      <packageset dir="${basedir}/src/main/java"/>
-                      <classpath>
-                        <path refid="maven.compile.classpath"/>
-                      </classpath>
-                    </javadoc>
-                    <javadoc sourcepath="${basedir}/src/main/java"
-                             destdir="${project.build.directory}/site/jdiff/xml"
-                             sourceFiles="${basedir}/dev-support/jdiff/Null.java"
-                             maxmemory="${jdiff.javadoc.maxmemory}">
-                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
-                        <param name="-oldapi" value="hadoop-core ${jdiff.stable.api}"/>
-                        <param name="-newapi" value="hadoop-core ${project.version}"/>
-                        <param name="-oldapidir" value="${basedir}/dev-support/jdiff"/>
-                        <param name="-newapidir" value="${project.build.directory}/site/jdiff/xml"/>
-                        <param name="-javadocold"
-                               value="http://hadoop.apache.org/docs/${jdiff.stable.api}/api/"/>
-                        <param name="-javadocnew" value="${project.build.directory}/site/api"/>
-                        <param name="-stats"/>
-                        <param name="${jdiff.stability}"/>
-                        <param name="${jdiff.compatibility}"/>
-                      </doclet>
-                      <classpath>
-                        <path refid="maven.compile.classpath"/>
-                      </classpath>
-                    </javadoc>
-
-                    <xslt style="${env.FINDBUGS_HOME}/src/xsl/default.xsl"
-                          in="${project.build.directory}/findbugsXml.xml"
-                          out="${project.build.directory}/site/findbugs.html"/>
-
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>src</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-tar-src</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-src</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>tar</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-tar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-copynativelibs.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      TAR='tar cf -'
-                      UNTAR='tar xfBp -'
-                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
-                      if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
-                      mkdir -p $${TARGET_DIR}
-                      cd $${LIB_DIR}
-                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      if [ "${bundle.snappy}" = "true" ] ; then
-                      cd ${snappy.lib}
-                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      fi
-                      fi
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-copynativelibs.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>tar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-maketar.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-tar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-tar</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>bintar</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-bintar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/bintar-copynativelibs.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      TAR='tar cf -'
-                      UNTAR='tar xfBp -'
-                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
-                      if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}-bin/lib"
-                      mkdir -p $${TARGET_DIR}
-                      cd $${LIB_DIR}
-                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      if [ "${bundle.snappy}" = "true" ] ; then
-                      cd ${snappy.lib}
-                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      fi
-                      fi
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./bintar-copynativelibs.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>bintar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/bintar-maketar.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}-bin.tar.gz ${project.artifactId}-${project.version}-bin
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./bintar-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-bintar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}-bin</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-bintar</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
   </profiles>
 </project>

+ 2 - 0
hdfs/CHANGES.txt → hadoop-hdfs/CHANGES.txt

@@ -671,6 +671,8 @@ Trunk (unreleased changes)
     HDFS-2260. Refactor BlockReader into an interface and implementation.
     (todd)
 
+    HDFS-2096. Mavenization of hadoop-hdfs (Alejandro Abdelnur via tomwhite)
+
   OPTIMIZATIONS
 
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image

+ 0 - 0
hdfs/LICENSE.txt → hadoop-hdfs/LICENSE.txt


+ 0 - 0
hdfs/NOTICE.txt → hadoop-hdfs/NOTICE.txt


+ 0 - 0
hdfs/src/test/all-tests → hadoop-hdfs/dev-support/all-tests


+ 0 - 0
hdfs/src/test/checkstyle-noframes-sorted.xsl → hadoop-hdfs/dev-support/checkstyle-noframes-sorted.xsl


+ 3 - 3
hdfs/src/test/checkstyle.xml → hadoop-hdfs/dev-support/checkstyle.xml

@@ -35,7 +35,7 @@
 
     <!-- Checks that a package.html file exists for each package.     -->
     <!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
-    <module name="PackageHtml"/>
+    <module name="JavadocPackage"/>
 
     <!-- Checks whether files end with a new line.                        -->
     <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
@@ -45,6 +45,8 @@
     <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
     <module name="Translation"/>
 
+    <module name="FileLength"/>
+    <module name="FileTabCharacter"/>
 
     <module name="TreeWalker">
 
@@ -96,7 +98,6 @@
 
         <!-- Checks for Size Violations.                    -->
         <!-- See http://checkstyle.sf.net/config_sizes.html -->
-        <module name="FileLength"/>
         <module name="LineLength"/>
         <module name="MethodLength"/>
         <module name="ParameterNumber"/>
@@ -110,7 +111,6 @@
         <module name="NoWhitespaceBefore"/>
         <module name="ParenPad"/>
         <module name="TypecastParenPad"/>
-        <module name="TabCharacter"/>
         <module name="WhitespaceAfter">
 	    	<property name="tokens" value="COMMA, SEMI"/>
 		</module>

+ 0 - 0
hdfs/src/test/commit-tests → hadoop-hdfs/dev-support/commit-tests


+ 0 - 0
hdfs/src/test/findbugsExcludeFile.xml → hadoop-hdfs/dev-support/findbugsExcludeFile.xml


+ 0 - 0
hdfs/lib/jdiff/hadoop-hdfs_0.20.0.xml → hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.20.0.xml


+ 0 - 0
hdfs/lib/jdiff/hadoop-hdfs_0.21.0.xml → hadoop-hdfs/dev-support/jdiff/hadoop-hdfs_0.21.0.xml


+ 0 - 0
hdfs/src/test/smoke-tests → hadoop-hdfs/dev-support/smoke-tests


+ 3 - 0
hdfs/src/test/test-patch.properties → hadoop-hdfs/dev-support/test-patch.properties

@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The number of acceptable warning for this module
+# Please update the root test-patch.properties if you update this file.
+
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
 OK_JAVADOC_WARNINGS=0

+ 406 - 0
hadoop-hdfs/pom.xml

@@ -0,0 +1,406 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project-distro</artifactId>
+    <version>0.23.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project-distro</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-hdfs</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <description>Apache Hadoop HDFS</description>
+  <name>Apache Hadoop HDFS</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdfs</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.aspectj</groupId>
+      <artifactId>aspectjtools</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.aspectj</groupId>
+      <artifactId>aspectjrt</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-daemon</groupId>
+      <artifactId>commons-daemon</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ant</groupId>
+      <artifactId>ant</artifactId>
+      <scope>provided</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo.jspc</groupId>
+        <artifactId>jspc-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>hdfs</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <compile>false</compile>
+              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <webFragmentFile>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webFragmentFile>
+              <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
+              <sources>
+                <directory>${basedir}/src/main/webapps/hdfs</directory>
+                <includes>
+                  <include>*.jsp</include>
+                </includes>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>secondary</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <compile>false</compile>
+              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <webFragmentFile>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webFragmentFile>
+              <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
+              <sources>
+                <directory>${basedir}/src/main/webapps/secondary</directory>
+                <includes>
+                  <include>*.jsp</include>
+                </includes>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>datanode</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <compile>false</compile>
+              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <webFragmentFile>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webFragmentFile>
+              <packageName>org.apache.hadoop.hdfs.server.datanode</packageName>
+              <sources>
+                <directory>${basedir}/src/main/webapps/datanode</directory>
+                <includes>
+                  <include>*.jsp</include>
+                </includes>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+        <dependencies>
+          <dependency>
+            <groupId>org.codehaus.mojo.jspc</groupId>
+            <artifactId>jspc-compiler-tomcat5</artifactId>
+            <version>2.0-alpha-3</version>
+          </dependency>
+          <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.4.1</version>
+          </dependency>
+          <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl104-over-slf4j</artifactId>
+            <version>1.4.1</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-src/main/jsp</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>create-web-xmls</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <loadfile property="hdfs.servlet.definitions" srcFile="${project.build.directory}/hdfs-jsp-servlet-definitions.xml"/>
+                <loadfile property="secondary.servlet.definitions" srcFile="${project.build.directory}/secondary-jsp-servlet-definitions.xml"/>
+                <loadfile property="datanode.servlet.definitions" srcFile="${project.build.directory}/datanode-jsp-servlet-definitions.xml"/>
+                <echoproperties destfile="${project.build.directory}/webxml.properties">
+                  <propertyset>
+                    <propertyref regex=".*.servlet.definitions"/>
+                  </propertyset>
+                </echoproperties>
+                <filter filtersfile="${project.build.directory}/webxml.properties"/>
+                <copy file="${basedir}/src/main/webapps/proto-hdfs-web.xml"
+                      tofile="${project.build.directory}/webapps/hdfs/WEB-INF/web.xml"
+                      filtering="true"/>
+                <copy file="${basedir}/src/main/webapps/proto-secondary-web.xml"
+                      tofile="${project.build.directory}/webapps/secondary/WEB-INF/web.xml"
+                      filtering="true"/>
+                <copy file="${basedir}/src/main/webapps/proto-datanode-web.xml"
+                      tofile="${project.build.directory}/webapps/datanode/WEB-INF/web.xml"
+                      filtering="true"/>
+                <copy toDir="${project.build.directory}/webapps">
+                  <fileset dir="${basedir}/src/main/webapps">
+                    <exclude name="**/*.jsp"/>
+                    <exclude name="**/proto-*-web.xml"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>create-log-dir</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <delete dir="${test.build.data}"/>
+                <mkdir dir="${hadoop.log.dir}"/>
+
+                <copy todir="${project.build.directory}/test-classes/webapps">
+                  <fileset dir="${project.build.directory}/webapps">
+                    <exclude name="proto-*-web.xml"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>CHANGES.txt</exclude>
+            <exclude>.idea/**</exclude>
+            <exclude>src/main/conf/*</exclude>
+            <exclude>src/main/docs/**</exclude>
+            <exclude>dev-support/findbugsExcludeFile.xml</exclude>
+            <exclude>dev-support/checkstyle*</exclude>
+            <exclude>dev-support/jdiff/**</exclude>
+            <exclude>dev-support/*tests</exclude>
+            <exclude>src/main/native/*</exclude>
+            <exclude>src/main/native/config/*</exclude>
+            <exclude>src/main/native/m4/*</exclude>
+            <exclude>src/test/empty-file</exclude>
+            <exclude>src/test/all-tests</exclude>
+            <exclude>src/test/resources/*.tgz</exclude>
+            <exclude>src/test/resources/data*</exclude>
+            <exclude>src/test/resources/editStored*</exclude>
+            <exclude>src/test/resources/empty-file</exclude>
+            <exclude>src/main/webapps/datanode/robots.txt</exclude>
+            <exclude>src/main/docs/releasenotes.html</exclude>
+            <exclude>src/contrib/**</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>native</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <copy toDir="${project.build.directory}/native">
+                      <fileset dir="${basedir}/src/main/native"/>
+                    </copy>
+                    <mkdir dir="${project.build.directory}/native/m4"/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>make-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>autoreconf</goal>
+                  <goal>configure</goal>
+                  <goal>make-install</goal>
+                </goals>
+                <configuration>
+                  <!-- autoreconf settings -->
+                  <workDir>${project.build.directory}/native</workDir>
+                  <arguments>
+                    <argument>-i</argument>
+                    <argument>-f</argument>
+                  </arguments>
+
+                  <!-- configure settings -->
+                  <configureEnvironment>
+                    <property>
+                      <name>ac_cv_func_malloc_0_nonnull</name>
+                      <value>yes</value>
+                    </property>
+                    <property>
+                      <name>JVM_ARCH</name>
+                      <value>${sun.arch.data.model}</value>
+                    </property>
+                  </configureEnvironment>
+                  <configureOptions>
+                  </configureOptions>
+                  <configureWorkDir>${project.build.directory}/native</configureWorkDir>
+                  <prefix>/usr/local</prefix>
+
+                  <!-- make settings -->
+                  <installEnvironment>
+                    <property>
+                      <name>ac_cv_func_malloc_0_nonnull</name>
+                      <value>yes</value>
+                    </property>
+                    <property>
+                      <name>JVM_ARCH</name>
+                      <value>${sun.arch.data.model}</value>
+                    </property>
+                  </installEnvironment>
+
+                  <!-- configure & make settings -->
+                  <destDir>${project.build.directory}/native/target</destDir>
+
+                </configuration>
+              </execution>
+              
+              <!-- TODO wire here native testcases
+              <execution>
+                <id>test</id>
+                <phase>test</phase>
+                <goals>
+                  <goal>test</goal>
+                </goals>
+                <configuration>
+                  <destDir>${project.build.directory}/native/target</destDir>
+                </configuration>
+              </execution>
+              -->
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>

+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/DfsTask.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/DfsTask.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/antlib.xml → hadoop-hdfs/src/ant/org/apache/hadoop/ant/antlib.xml


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsExists.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsExists.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java


+ 0 - 0
hdfs/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java → hadoop-hdfs/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java


+ 0 - 0
hdfs/src/contrib/build-contrib.xml → hadoop-hdfs/src/contrib/build-contrib.xml


+ 0 - 0
hdfs/src/contrib/build.xml → hadoop-hdfs/src/contrib/build.xml


+ 0 - 0
hdfs/src/contrib/fuse-dfs/Makefile.am → hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am


+ 0 - 0
hdfs/src/contrib/fuse-dfs/README → hadoop-hdfs/src/contrib/fuse-dfs/README


+ 0 - 0
hdfs/src/contrib/fuse-dfs/acinclude.m4 → hadoop-hdfs/src/contrib/fuse-dfs/acinclude.m4


+ 0 - 0
hdfs/src/contrib/fuse-dfs/build.xml → hadoop-hdfs/src/contrib/fuse-dfs/build.xml


+ 0 - 0
hdfs/src/contrib/fuse-dfs/configure.ac → hadoop-hdfs/src/contrib/fuse-dfs/configure.ac


+ 0 - 0
hdfs/src/contrib/fuse-dfs/global_footer.mk → hadoop-hdfs/src/contrib/fuse-dfs/global_footer.mk


+ 0 - 0
hdfs/src/contrib/fuse-dfs/global_header.mk → hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk


+ 0 - 0
hdfs/src/contrib/fuse-dfs/ivy.xml → hadoop-hdfs/src/contrib/fuse-dfs/ivy.xml


+ 0 - 0
hdfs/src/contrib/fuse-dfs/ivy/libraries.properties → hadoop-hdfs/src/contrib/fuse-dfs/ivy/libraries.properties


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/Makefile.am → hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_connect.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_connect.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_connect.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_context_handle.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_context_handle.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_dfs.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_file_handle.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_file_handle.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_access.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_access.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_chmod.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chmod.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_chown.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_chown.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_create.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_create.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_flush.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_flush.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_getattr.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_getattr.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_mkdir.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mkdir.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_mknod.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_mknod.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_open.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_open.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_read.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_read.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_readdir.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_readdir.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_release.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_release.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_rename.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rename.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_rmdir.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_rmdir.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_statfs.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_statfs.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_symlink.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_symlink.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_unlink.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_unlink.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_utimens.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_utimens.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_impls_write.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_write.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_init.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_init.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_init.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_options.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_options.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_options.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_stat_struct.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_trash.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_trash.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_trash.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_users.c → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.c


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/fuse_users.h → hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_users.h


+ 0 - 0
hdfs/src/contrib/fuse-dfs/src/test/TestFuseDFS.java → hadoop-hdfs/src/contrib/fuse-dfs/src/test/TestFuseDFS.java


+ 0 - 0
hdfs/bin/distribute-exclude.sh → hadoop-hdfs/src/main/bin/distribute-exclude.sh


+ 0 - 0
hdfs/bin/hdfs → hadoop-hdfs/src/main/bin/hdfs


+ 0 - 0
hdfs/bin/hdfs-config.sh → hadoop-hdfs/src/main/bin/hdfs-config.sh


+ 0 - 0
hdfs/bin/refresh-namenodes.sh → hadoop-hdfs/src/main/bin/refresh-namenodes.sh


+ 0 - 0
hdfs/bin/start-balancer.sh → hadoop-hdfs/src/main/bin/start-balancer.sh


+ 0 - 0
hdfs/bin/start-dfs.sh → hadoop-hdfs/src/main/bin/start-dfs.sh


+ 0 - 0
hdfs/bin/start-secure-dns.sh → hadoop-hdfs/src/main/bin/start-secure-dns.sh


+ 0 - 0
hdfs/bin/stop-balancer.sh → hadoop-hdfs/src/main/bin/stop-balancer.sh


+ 0 - 0
hdfs/bin/stop-dfs.sh → hadoop-hdfs/src/main/bin/stop-dfs.sh


+ 0 - 0
hdfs/bin/stop-secure-dns.sh → hadoop-hdfs/src/main/bin/stop-secure-dns.sh


+ 17 - 0
hdfs/conf/hadoop-metrics2.properties → hadoop-hdfs/src/main/conf/hadoop-metrics2.properties

@@ -1,3 +1,20 @@
+#
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+
 # syntax: [prefix].[source|sink].[instance].[options]
 # See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
 

+ 21 - 0
hadoop-hdfs/src/main/conf/hdfs-site.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+</configuration>

+ 0 - 0
hdfs/src/docs/changes/ChangesFancyStyle.css → hadoop-hdfs/src/main/docs/changes/ChangesFancyStyle.css


+ 0 - 0
hdfs/src/docs/changes/ChangesSimpleStyle.css → hadoop-hdfs/src/main/docs/changes/ChangesSimpleStyle.css


+ 0 - 0
hdfs/src/docs/changes/changes2html.pl → hadoop-hdfs/src/main/docs/changes/changes2html.pl


+ 0 - 0
hdfs/src/docs/forrest.properties → hadoop-hdfs/src/main/docs/forrest.properties


+ 0 - 0
hdfs/src/docs/releasenotes.html → hadoop-hdfs/src/main/docs/releasenotes.html


+ 0 - 0
hdfs/src/docs/src/documentation/README.txt → hadoop-hdfs/src/main/docs/src/documentation/README.txt


+ 0 - 0
hdfs/src/docs/src/documentation/classes/CatalogManager.properties → hadoop-hdfs/src/main/docs/src/documentation/classes/CatalogManager.properties


+ 0 - 0
hdfs/src/docs/src/documentation/conf/cli.xconf → hadoop-hdfs/src/main/docs/src/documentation/conf/cli.xconf


+ 0 - 0
hdfs/src/docs/src/documentation/content/xdocs/SLG_user_guide.xml → hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/SLG_user_guide.xml


+ 0 - 0
hdfs/src/docs/src/documentation/content/xdocs/faultinject_framework.xml → hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/faultinject_framework.xml


+ 0 - 0
hdfs/src/docs/src/documentation/content/xdocs/hdfs_design.xml → hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/hdfs_design.xml


+ 0 - 0
hdfs/src/docs/src/documentation/content/xdocs/hdfs_editsviewer.xml → hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/hdfs_editsviewer.xml


+ 0 - 0
hdfs/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml → hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml


برخی فایل ها در این مقایسه diff نمایش داده نمی شوند زیرا تعداد فایل ها بسیار زیاد است