
HADOOP-6671. Use maven for hadoop common builds. Contributed by Alejandro Abdelnur.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1153184 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 14 years ago
parent
commit
0f6dfeeacb
100 changed files with 1520 additions and 3204 deletions
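
This commit replaces the Ivy/Ant build of Hadoop Common with a multi-module Maven build: common/build.xml and the common/ivy tree are deleted, while hadoop-annotations, hadoop-assemblies, and hadoop-common each gain a pom.xml (see the file list below; the new hadoop-common/BUILDING.txt carries the build instructions). As a rough sketch, a minimal module POM of the shape of the new 38-line hadoop-annotations/pom.xml might look like the following; the group id and the 0.23.0-SNAPSHOT version are taken from the deleted build.xml, but the display name and the absence of a parent are assumptions, not read out of this diff:

    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                                 http://maven.apache.org/xsd/maven-4.0.0.xsd">
      <modelVersion>4.0.0</modelVersion>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-annotations</artifactId>
      <version>0.23.0-SNAPSHOT</version>
      <packaging>jar</packaging>
      <!-- display name is a guess, not copied from the commit -->
      <name>Apache Hadoop Annotations</name>
    </project>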
  1. .gitignore (+7, -0)
  2. common/build.xml (+0, -1927)
  3. common/conf/configuration.xsl (+0, -24)
  4. common/conf/core-site.xml.template (+0, -8)
  5. common/conf/hadoop-env.sh.template (+0, -66)
  6. common/conf/hadoop-policy.xml.template (+0, -106)
  7. common/conf/masters.template (+0, -1)
  8. common/conf/slaves.template (+0, -2)
  9. common/ivy.xml (+0, -331)
  10. common/ivy/hadoop-common-instrumented-template.xml (+0, -139)
  11. common/ivy/hadoop-common-template.xml (+0, -159)
  12. common/ivy/hadoop-common-test-template.xml (+0, -58)
  13. common/ivy/ivysettings.xml (+0, -50)
  14. common/ivy/libraries.properties (+0, -90)
  15. common/src/fixFontsPath.sh (+0, -86)
  16. common/src/native/packageNativeHadoop.sh (+0, -78)
  17. common/src/packages/templates/conf/core-site.xml (+0, -11)
  18. dev-support/smart-apply-patch.sh (+0, -0)
  19. dev-support/test-patch.properties (+3, -33)
  20. dev-support/test-patch.sh (+43, -29)
  21. hadoop-annotations/pom.xml (+38, -0)
  22. hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java (+0, -0)
  23. hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceStability.java (+0, -0)
  24. hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ExcludePrivateAnnotationsJDiffDoclet.java (+0, -0)
  25. hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java (+0, -0)
  26. hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java (+0, -0)
  27. hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java (+0, -0)
  28. hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/package-info.java (+0, -0)
  29. hadoop-assemblies/pom.xml (+99, -0)
  30. hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml (+113, -0)
  31. hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml (+37, -0)
  32. hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml (+85, -0)
  33. hadoop-common/BUILDING.txt (+61, -0)
  34. hadoop-common/CHANGES.txt (+3, -0)
  35. hadoop-common/LICENSE.txt (+0, -0)
  36. hadoop-common/NOTICE.txt (+0, -0)
  37. hadoop-common/README.txt (+0, -0)
  38. hadoop-common/dev-support/checkstyle.xml (+3, -3)
  39. hadoop-common/dev-support/findbugsExcludeFile.xml (+0, -0)
  40. hadoop-common/dev-support/jdiff/hadoop-core_0.20.0.xml (+0, -0)
  41. hadoop-common/dev-support/jdiff/hadoop-core_0.21.0.xml (+0, -0)
  42. hadoop-common/dev-support/jdiff/hadoop_0.17.0.xml (+0, -0)
  43. hadoop-common/dev-support/jdiff/hadoop_0.18.1.xml (+0, -0)
  44. hadoop-common/dev-support/jdiff/hadoop_0.18.2.xml (+0, -0)
  45. hadoop-common/dev-support/jdiff/hadoop_0.18.3.xml (+0, -0)
  46. hadoop-common/dev-support/jdiff/hadoop_0.19.0.xml (+0, -0)
  47. hadoop-common/dev-support/jdiff/hadoop_0.19.1.xml (+0, -0)
  48. hadoop-common/dev-support/jdiff/hadoop_0.19.2.xml (+0, -0)
  49. hadoop-common/dev-support/jdiff/hadoop_0.20.0.xml (+0, -0)
  50. hadoop-common/dev-support/jdiff/hadoop_0.20.1.xml (+0, -0)
  51. hadoop-common/dev-support/jdiff/hadoop_0.20.2.xml (+0, -0)
  52. hadoop-common/dev-support/saveVersion.sh (+9, -3)
  53. hadoop-common/pom.xml (+1019, -0)
  54. hadoop-common/src/contrib/bash-tab-completion/README (+0, -0)
  55. hadoop-common/src/contrib/bash-tab-completion/hadoop.sh (+0, -0)
  56. hadoop-common/src/main/bin/hadoop (+0, -0)
  57. hadoop-common/src/main/bin/hadoop-config.sh (+0, -0)
  58. hadoop-common/src/main/bin/hadoop-daemon.sh (+0, -0)
  59. hadoop-common/src/main/bin/hadoop-daemons.sh (+0, -0)
  60. hadoop-common/src/main/bin/rcc (+0, -0)
  61. hadoop-common/src/main/bin/slaves.sh (+0, -0)
  62. hadoop-common/src/main/bin/start-all.sh (+0, -0)
  63. hadoop-common/src/main/bin/stop-all.sh (+0, -0)
  64. hadoop-common/src/main/conf/hadoop-metrics.properties (+0, -0)
  65. hadoop-common/src/main/conf/hadoop-metrics2.properties (+0, -0)
  66. hadoop-common/src/main/conf/log4j.properties (+0, -0)
  67. hadoop-common/src/main/conf/ssl-client.xml.example (+0, -0)
  68. hadoop-common/src/main/conf/ssl-server.xml.example (+0, -0)
  69. hadoop-common/src/main/docs/changes/ChangesFancyStyle.css (+0, -0)
  70. hadoop-common/src/main/docs/changes/ChangesSimpleStyle.css (+0, -0)
  71. hadoop-common/src/main/docs/changes/changes2html.pl (+0, -0)
  72. hadoop-common/src/main/docs/forrest.properties (+0, -0)
  73. hadoop-common/src/main/docs/releasenotes.html (+0, -0)
  74. hadoop-common/src/main/docs/src/documentation/README.txt (+0, -0)
  75. hadoop-common/src/main/docs/src/documentation/classes/CatalogManager.properties (+0, -0)
  76. hadoop-common/src/main/docs/src/documentation/conf/cli.xconf (+0, -0)
  77. hadoop-common/src/main/docs/src/documentation/content/xdocs/Superusers.xml (+0, -0)
  78. hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml (+0, -0)
  79. hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml (+0, -0)
  80. hadoop-common/src/main/docs/src/documentation/content/xdocs/deployment_layout.xml (+0, -0)
  81. hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml (+0, -0)
  82. hadoop-common/src/main/docs/src/documentation/content/xdocs/index.xml (+0, -0)
  83. hadoop-common/src/main/docs/src/documentation/content/xdocs/native_libraries.xml (+0, -0)
  84. hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml (+0, -0)
  85. hadoop-common/src/main/docs/src/documentation/content/xdocs/single_node_setup.xml (+0, -0)
  86. hadoop-common/src/main/docs/src/documentation/content/xdocs/site.xml (+0, -0)
  87. hadoop-common/src/main/docs/src/documentation/content/xdocs/tabs.xml (+0, -0)
  88. hadoop-common/src/main/docs/src/documentation/resources/images/architecture.gif (+0, -0)
  89. hadoop-common/src/main/docs/src/documentation/resources/images/common-logo.jpg (+0, -0)
  90. hadoop-common/src/main/docs/src/documentation/resources/images/core-logo.gif (+0, -0)
  91. hadoop-common/src/main/docs/src/documentation/resources/images/favicon.ico (+0, -0)
  92. hadoop-common/src/main/docs/src/documentation/resources/images/hadoop-logo-big.jpg (+0, -0)
  93. hadoop-common/src/main/docs/src/documentation/resources/images/hadoop-logo.jpg (+0, -0)
  94. hadoop-common/src/main/docs/src/documentation/resources/images/hdfsarchitecture.gif (+0, -0)
  95. hadoop-common/src/main/docs/src/documentation/resources/images/hdfsarchitecture.odg (+0, -0)
  96. hadoop-common/src/main/docs/src/documentation/resources/images/hdfsarchitecture.png (+0, -0)
  97. hadoop-common/src/main/docs/src/documentation/resources/images/hdfsdatanodes.gif (+0, -0)
  98. hadoop-common/src/main/docs/src/documentation/resources/images/hdfsdatanodes.odg (+0, -0)
  99. hadoop-common/src/main/docs/src/documentation/resources/images/hdfsdatanodes.png (+0, -0)
  100. hadoop-common/src/main/docs/src/documentation/skinconf.xml (+0, -0)

+ 7 - 0
.gitignore

@@ -0,0 +1,7 @@
+*.iml
+*.ipr
+*.iws
+.idea
+.svn
+.classpath
+target
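
The new entries ignore IDE metadata (*.iml, *.ipr, *.iws, .idea, .classpath) plus target, Maven's default build output directory, which takes over from the build/ tree (${build.dir}) used by the deleted Ant build below. If a module ever needed a different output location, Maven allows overriding it per POM; a minimal sketch of that standard knob:

    <build>
      <!-- "target" is already the default; shown only to make the link
           to the new .gitignore entry explicit -->
      <directory>${project.basedir}/target</directory>
    </build>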

+ 0 - 1927
common/build.xml

@@ -1,1927 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<project name="Hadoop-Common" default="compile" 
-   xmlns:ivy="antlib:org.apache.ivy.ant"
-   xmlns:artifact="urn:maven-artifact-ant"> 
-
-  <!-- Load all the default properties, and any the user wants    -->
-  <!-- to contribute (without having to type -D or edit this file -->
-  <property file="${user.home}/build.properties" />
-  <property file="${basedir}/build.properties" />
- 
-  <property name="module" value="common"/>
-  <property name="Name" value="Hadoop-${module}"/>
-  <property name="name" value="hadoop-${module}"/>
-  <property name="_version" value="0.23.0"/>
-  <property name="version" value="${_version}-SNAPSHOT"/>
-  <property name="final.name" value="${name}-${version}"/>
-  <property name="test.final.name" value="${name}-test-${version}"/>
-  <property name="year" value="2009"/>
-  <property name="package.release" value="1"/>
-
-  <property name="src.dir" value="${basedir}/src"/>  	
-  <property name="java.src.dir" value="${src.dir}/java"/>
-  <property name="native.src.dir" value="${basedir}/src/native"/>
-
-  <property name="lib.dir" value="${basedir}/lib"/>
-  <property name="conf.dir" value="${basedir}/conf"/>
-  <property name="docs.src" value="${basedir}/src/docs"/>
-  <property name="changes.src" value="${docs.src}/changes"/>
-  <property name="src.webapps" value="${basedir}/src/webapps"/>
-
-  <property name="build.dir" value="${basedir}/build"/>
-  <property name="build.classes" value="${build.dir}/classes"/>
-  <property name="build.src" value="${build.dir}/src"/>
-  <property name="build.webapps" value="${build.dir}/webapps"/>
-
-  <!-- convert spaces to _ so that Mac OS doesn't break things -->
-  <exec executable="tr" inputstring="${os.name}" 
-        outputproperty="nonspace.os">
-     <arg value="[:space:]"/>
-     <arg value="_"/>
-  </exec>
-  <property name="build.platform" 
-            value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
-  <property name="jvm.arch" 
-            value="${sun.arch.data.model}"/>
-  <property name="build.native" value="${build.dir}/native/${build.platform}"/>
-  <property name="build.docs" value="${build.dir}/docs"/>
-  <property name="build.javadoc" value="${build.docs}/api"/>
-  <property name="build.javadoc.timestamp" value="${build.javadoc}/index.html" />
-  <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
-  <property name="build.encoding" value="ISO-8859-1"/>
-  <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
-
-  <property name="test.src.dir" value="${basedir}/src/test"/>
-  <property name="test.build.dir" value="${build.dir}/test"/>
-  <property name="test.generated.dir" value="${test.build.dir}/src"/>
-  <property name="test.build.data" value="${test.build.dir}/data"/>
-  <property name="test.cache.data" value="${test.build.dir}/cache"/>
-  <property name="test.debug.data" value="${test.build.dir}/debug"/>
-  <property name="test.log.dir" value="${test.build.dir}/logs"/>
-  <property name="test.build.classes" value="${test.build.dir}/classes"/>
-  <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
-  <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
-  <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
-  <property name="test.build.webapps" value="${build.dir}/test/webapps"/>
-  <property name="test.include" value="Test*"/>
-  <property name="test.classpath.id" value="test.classpath"/>
-  <property name="test.output" value="no"/>
-  <property name="test.timeout" value="900000"/>
-  <property name="test.junit.output.format" value="plain"/>
-  <property name="test.junit.fork.mode" value="perTest" />
-  <property name="test.junit.printsummary" value="yes" />
-  <property name="test.junit.haltonfailure" value="no" />
-  <property name="test.junit.maxmemory" value="512m" />
-  <property name="test.conf.dir" value="${build.dir}/test/conf" />
-
-  <property name="test.core.build.classes" value="${test.build.dir}/core/classes"/>
-
-  <property name="test.all.tests.file" value="${test.src.dir}/all-tests"/>
-  <property name="test.exclude.file" value="${test.src.dir}/empty-file" />
-
-  <property name="javadoc.link.java"
-	    value="http://java.sun.com/javase/6/docs/api/"/>
-  <property name="javadoc.packages" value="org.apache.hadoop.*"/>
-  <property name="javadoc.maxmemory" value="512m" />
-
-  <property name="dist.dir" value="${build.dir}/${final.name}"/>
-
-  <property name="javac.debug" value="on"/>
-  <property name="javac.optimize" value="on"/>
-  <property name="javac.deprecation" value="off"/>
-  <property name="javac.version" value="1.6"/>
-  <property name="javac.args" value=""/>
-  <property name="javac.args.warnings" value="-Xlint:unchecked"/>
-
-  <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
-  <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
-
-  <property name="rat.reporting.classname" value="rat.Report"/>
-
-  <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
-  <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
-  <property name="jdiff.stability" value="-unstable"/>
-  <property name="jdiff.compatibility" value=""/>
-  <property name="jdiff.stable" value="0.20.2"/>
-  <property name="jdiff.stable.javadoc" 
-            value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
-
-  <property name="scratch.dir" value="${user.home}/tmp"/>
-  <property name="svn.cmd" value="svn"/>
-  <property name="grep.cmd" value="grep"/>
-  <property name="patch.cmd" value="patch"/>
-  <property name="make.cmd" value="make"/>
-
-	
-  <!-- IVY properties set here -->
-  <property name="ivy.repo.dir" value="${user.home}/ivyrepo" />
-  <property name="ivy.dir" location="ivy" />
-  <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
-  <property name="asfrepo" value="https://repository.apache.org"/> 
-  <property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
-  <property name="asfstagingrepo"
-  value="${asfrepo}/service/local/staging/deploy/maven2"/>
-  <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
-  <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
-  <property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
-  <property name="ant_task_repo_url" 
-     value="${mvnrepo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
-  <property name="ivy_repo_url" value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
-  <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
-  <property name="ivy.org" value="org.apache.hadoop"/>
-  <property name="build.dir" location="build" />
-  <property name="dist.dir" value="${build.dir}/${final.name}"/>
-  <property name="build.ivy.dir" location="${build.dir}/ivy" />
-  <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
-  <property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
-  <property name="build.ivy.report.dir" location="${build.ivy.dir}/report"/>
-  <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven"/>
-  <property name="pom.xml" location="${build.ivy.maven.dir}/pom.xml"/>
-  <property name="hadoop-common.pom" location="${ivy.dir}/hadoop-common.xml"/>
-  <property name="build.ivy.maven.common.jar" location="${build.ivy.maven.dir}/hadoop-common-${version}.jar"/>
-  <property name="hadoop-common-test.pom" location="${ivy.dir}/hadoop-common-test.xml" />
-  <property name="build.ivy.maven.common-test.jar" location="${build.ivy.maven.dir}/hadoop-common-test-${version}.jar"/>
-
-  <!--this is the naming policy for artifacts we want pulled down-->
-  <property name="ivy.module" location="hadoop-common" />
-  <property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
-
-  <!--this is how artifacts that get built are named-->
-  <property name="ivy.publish.pattern" value="[artifact]-[revision].[ext]"/>
-  <property name="hadoop-common.jar" location="${build.dir}/${final.name}.jar" />
-  <property name="hadoop-common-test.jar" location="${build.dir}/${test.final.name}.jar" />
-  <property name="hadoop-common-sources.jar" location="${build.dir}/${final.name}-sources.jar" />
-  <property name="hadoop-common-test-sources.jar" location="${build.dir}/${test.final.name}-sources.jar" />
-
-  <!-- jdiff.home property set -->
-  <property name="jdiff.home" value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
-  <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/>
-  <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/>
-
-  <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
-  <available property="clover.present" file="${clover.jar}" />
-	
-  <!-- Eclipse properties -->
-  <property name="build.dir.eclipse" value="build/eclipse"/>
-  <property name="build.dir.eclipse-main-classes" value="${build.dir.eclipse}/classes-main"/>
-  <property name="build.dir.eclipse-test-classes" value="${build.dir.eclipse}/classes-test"/>
-  <property name="build.dir.eclipse-test-generated-classes" value="${build.dir.eclipse}/classes-test-generated"/>
-
-  <!-- Use environment -->
-  <property environment="env" />
-
-  <!-- check if clover reports should be generated -->
-  <condition property="clover.enabled">
-    <and>
-        <isset property="run.clover"/>
-        <isset property="clover.present"/>
-    </and>
-  </condition>
-
-  <condition property="staging">
-     <equals arg1="${repo}" arg2="staging"/>
-  </condition>
-
-  <!-- packaging properties -->
-  <property name="package.prefix" value="/usr"/>
-  <property name="package.conf.dir" value="/etc/hadoop"/>
-  <property name="package.log.dir" value="/var/log/hadoop"/>
-  <property name="package.pid.dir" value="/var/run/hadoop"/>
-  <property name="package.var.dir" value="/var/lib/hadoop"/>
-  <property name="package.share.dir" value="/share/hadoop/${module}"/>
-  <!-- Use fixed path to build rpm for avoiding rpmbuild conflict with dash path names -->
-  <property name="package.buildroot" value="/tmp/hadoop_package_build_${user.name}"/>
-  <property name="package.build.dir" value="/tmp/hadoop_package_build_${user.name}/BUILD"/>
-
-  <!-- Indicates whether the Snappy native library should be bundled with Hadoop or not -->
-  <property name="bundle.snappy" value="false"/>
-
-  <!-- Snappy native library location -->
-  <property name="snappy.prefix" value="/usr/local"/>
-  <property name="snappy.lib" value="${snappy.prefix}/lib"/>
-  <property name="snappy.include" value="${snappy.prefix}/include"/>
-
-  <!-- the normal classpath -->
-  <path id="classpath">
-    <pathelement location="${build.classes}"/>
-    <pathelement location="${conf.dir}"/>
-    <path refid="ivy-common.classpath"/>
-  </path>
-
-  <path id="test.classpath">
-    <pathelement location="${test.build.extraconf}"/>
-    <pathelement location="${test.core.build.classes}" />
-    <pathelement location="${test.src.dir}"/>
-    <pathelement location="${test.build.dir}"/>
-    <pathelement location="${build.dir}"/>
-    <pathelement location="${build.examples}"/>
-    <pathelement location="${build.tools}"/>
-    <pathelement path="${clover.jar}"/>
-    <path refid="ivy-common.classpath"/>
-    <path refid="ivy-test.classpath"/>
-    <pathelement location="${hadoop-common.jar}"/>
-    <pathelement location="${test.conf.dir}"/>
-  </path>
-<!--
-  <path id="test.hdfs.classpath">
-    <pathelement location="${test.hdfs.build.classes}" />
-    <path refid="test.classpath"/>
-  </path>
-
-  <path id="test.mapred.classpath">
-    <pathelement location="${test.mapred.build.classes}" />
-    <path refid="test.hdfs.classpath"/>
-  </path>
-
-  <path id="test.hdfs.with.mr.classpath">
-    <pathelement location="${test.hdfs.with.mr.build.classes}" />
-    <path refid="test.mapred.classpath"/>
-  </path>
--->
-  <!-- the cluster test classpath: uses conf.dir for configuration -->
-  <path id="test.cluster.classpath">
-    <path refid="classpath"/>
-    <pathelement location="${test.build.classes}" />
-    <pathelement location="${test.src.dir}"/>
-    <pathelement location="${build.dir}"/>
-  </path>
-
-
-  <!-- ====================================================== -->
-  <!-- Macro definitions                                      -->
-  <!-- ====================================================== -->
-  <macrodef name="macro_tar" description="Worker Macro for tar">
-    <attribute name="param.destfile"/>
-    <element name="param.listofitems"/>
-    <sequential>
-      <tar compression="gzip" longfile="gnu"
-      destfile="@{param.destfile}">
-      <param.listofitems/>
-      </tar>
-    </sequential>
-  </macrodef>
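
The macro_tar helper above is how the Ant build rolled release tarballs. In the mavenized layout that job moves to the assembly descriptors added by this commit (hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml and friends). A sketch of how a POM typically consumes such shared descriptors via the maven-assembly-plugin, assuming the descriptor declares the id hadoop-tar; the exact wiring in hadoop-common/pom.xml is not reproduced in this excerpt:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-assembly-plugin</artifactId>
      <dependencies>
        <dependency>
          <!-- puts the descriptors shipped in hadoop-assemblies on the
               plugin classpath so they can be referenced by id -->
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-assemblies</artifactId>
          <version>0.23.0-SNAPSHOT</version>
        </dependency>
      </dependencies>
      <configuration>
        <descriptorRefs>
          <descriptorRef>hadoop-tar</descriptorRef>
        </descriptorRefs>
      </configuration>
    </plugin>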
-
-  <!-- ====================================================== -->
-  <!-- Stuff needed by all targets                            -->
-  <!-- ====================================================== -->
-  <target name="init" depends="ivy-retrieve-common">
-    <mkdir dir="${build.dir}"/>
-    <mkdir dir="${build.classes}"/>
-    <mkdir dir="${build.src}"/>
-    <mkdir dir="${build.webapps}"/>
- 
-    <mkdir dir="${test.build.dir}"/>
-    <mkdir dir="${test.build.classes}"/>
-    <mkdir dir="${test.build.extraconf}"/>
-    <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
-    <touch millis="0" file="${touch.temp.file}">
-      <fileset dir="${conf.dir}" includes="**/*.template"/>
-    </touch>
-    <delete file="${touch.temp.file}"/>
-    <!-- copy all of the jsp and static files -->
-    <copy todir="${build.webapps}">
-      <fileset dir="${src.webapps}">
-        <exclude name="**/*.jsp" />
-        <exclude name="**/*.jspx" />
-      </fileset>
-    </copy>
-
-    <copy todir="${conf.dir}" verbose="true">
-      <fileset dir="${conf.dir}" includes="**/*.template"/>
-      <mapper type="glob" from="*.template" to="*"/>
-    </copy>
-
-    <mkdir dir="${test.conf.dir}"/>
-    <copy todir="${test.conf.dir}" verbose="true">
-      <fileset dir="${conf.dir}" includes="**/*.template"/>
-      <mapper type="glob" from="*.template" to="*"/>
-    </copy>
-
-    <exec executable="sh">
-       <arg line="src/saveVersion.sh ${version} ${build.dir}"/>
-    </exec>
-	
-  </target>
-
-  <import file="${test.src.dir}/aop/build/aop.xml"/>
-
-  <!-- ====================================================== -->
-  <!-- Compile the Java files                                 -->
-  <!-- ====================================================== -->
-  <target name="record-parser" depends="init" if="javacc.home">
-      <javacc
-          target="${java.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
-          outputdirectory="${java.src.dir}/org/apache/hadoop/record/compiler/generated"
-          javacchome="${javacc.home}" />
-  </target>
-  
-  <target name="compile-rcc-compiler" depends="init, record-parser">
-    <javac 
-        encoding="${build.encoding}" 
-        srcdir="${java.src.dir}"
-        includes="org/apache/hadoop/record/compiler/**/*.java"
-        destdir="${build.classes}"
-        debug="${javac.debug}"
-        optimize="${javac.optimize}"
-        target="${javac.version}"
-        source="${javac.version}"
-        deprecation="${javac.deprecation}">
-        <compilerarg line="${javac.args}"/>
-        <classpath refid="classpath"/>
-    </javac>
-    
-    <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
-      <classpath refid="classpath" />
-    </taskdef>
-  </target>
-  
-  <target name="compile-core-classes" depends="init, compile-rcc-compiler">
-    <!-- Compile Java files (excluding JSPs) checking warnings -->
-    <javac 
-     encoding="${build.encoding}" 
-     srcdir="${java.src.dir};${build.src}"	
-     includes="org/apache/hadoop/**/*.java"
-     destdir="${build.classes}"
-     debug="${javac.debug}"
-     optimize="${javac.optimize}"
-     target="${javac.version}"
-     source="${javac.version}"
-     deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args} ${javac.args.warnings}" />
-      <classpath refid="classpath"/>
-    </javac>
-
-    <copy todir="${build.classes}">
-      <fileset dir="${java.src.dir}" includes="**/*.properties"/>
-      <fileset dir="${java.src.dir}" includes="core-default.xml"/>
-    </copy>
-     
-  </target>
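
This deleted target compiles the Java sources and then hand-copies *.properties files and core-default.xml next to the classes. Maven gets the same effect declaratively, either through the src/main/resources convention or, if the resources stayed interleaved with the Java sources as they were here, through an explicit <resources> stanza; a sketch under that second assumption:

    <build>
      <resources>
        <resource>
          <!-- pick up the properties files and core-default.xml that
               sit alongside the Java sources -->
          <directory>src/main/java</directory>
          <includes>
            <include>**/*.properties</include>
            <include>core-default.xml</include>
          </includes>
        </resource>
      </resources>
    </build>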
-
-  <target name="compile-native">
-    <antcall target="compile-core-native">
-      <param name="compile.native" value="true"/>
-    </antcall> 
-  </target>
-
-  <target name="check-native-configure" if="compile.native">
-    <condition property="need.native.configure">
-       <not> <available file="${native.src.dir}/configure"/> </not>
-    </condition>
-  </target>
-
-  <target name="create-native-configure" depends="check-native-configure" if="need.native.configure">
-    <mkdir dir="${native.src.dir}/config"/>
-    <mkdir dir="${native.src.dir}/m4"/>
-    <exec executable="autoreconf" dir="${native.src.dir}" 
-          searchpath="yes" failonerror="yes">
-       <arg value="-i"/>
-       <arg value="-f"/>
-    </exec>
-  </target>
-
-  <target name="check-native-makefile" if="compile.native">
-    <condition property="need.native.makefile">
-       <not> <available file="${native.src.dir}/Makefile"/> </not>
-    </condition>
-  </target>
-
-  <target name="create-native-makefile" depends="check-native-makefile" if="need.native.makefile"> 
-    <antcall target="create-native-configure"/>
-    <mkdir dir="${build.native}"/>
-
-    <exec dir="${build.native}" executable="sh" failonerror="true">
-      <env key="OS_NAME" value="${os.name}"/>
-      <env key="OS_ARCH" value="${os.arch}"/>
-      <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
-      <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
-      <arg line="${native.src.dir}/configure CPPFLAGS=-I${snappy.include} LDFLAGS=-L${snappy.lib}"/>
-    </exec>
-  </target>
-
-
-  <target name="compile-core-native" depends="compile-core-classes,create-native-makefile"
-          if="compile.native">
-  	
-    <mkdir dir="${build.native}/lib"/>
-    <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
-    <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/snappy"/>
-    <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
-    <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
-
-  	<javah 
-  	  classpath="${build.classes}"
-  	  destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
-      force="yes"
-  	  verbose="yes"
-  	  >
-  	  <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
-      <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
-  	</javah>
-
-    <javah
-      classpath="${build.classes}"
-      destdir="${build.native}/src/org/apache/hadoop/io/compress/snappy"
-      force="yes"
-      verbose="yes"
-      >
-      <class name="org.apache.hadoop.io.compress.snappy.SnappyCompressor"/>
-      <class name="org.apache.hadoop.io.compress.snappy.SnappyDecompressor"/>
-    </javah>
-
-    <javah
-  	  classpath="${build.classes}"
-  	  destdir="${build.native}/src/org/apache/hadoop/security"
-      force="yes"
-  	  verbose="yes"
-  	  >
-  	  <class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" />
-  	</javah>
-  	<javah
-  	  classpath="${build.classes}"
-  	  destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
-      force="yes"
-  	  verbose="yes"
-  	  >
-  	  <class name="org.apache.hadoop.io.nativeio.NativeIO" />
-  	</javah>
-
-  	<javah
-  	  classpath="${build.classes}"
-  	  destdir="${build.native}/src/org/apache/hadoop/security"
-      force="yes"
-  	  verbose="yes"
-  	  >
-  	  <class name="org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping" />
-  	</javah>
-
-    <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
-      <env key="OS_NAME" value="${os.name}"/>
-      <env key="OS_ARCH" value="${os.arch}"/>
-  	  <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
-  	  <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
-    </exec>
-
-	<exec dir="${build.native}" executable="sh" failonerror="true">
-	  <arg line="${build.native}/libtool --mode=install cp ${build.native}/libhadoop.la ${build.native}/lib"/>
-    </exec>
-
-  </target>
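
The native compilation above (javah header generation followed by configure and make, with the Snappy flags from the properties section) has no core Maven equivalent; builds of this era usually shelled out through the maven-antrun-plugin. A sketch of that pattern, assuming antrun 1.6 syntax and a src/main/native layout; the real wiring in hadoop-common/pom.xml is not reproduced here:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-antrun-plugin</artifactId>
      <executions>
        <execution>
          <id>compile-native</id>
          <phase>compile</phase>
          <goals><goal>run</goal></goals>
          <configuration>
            <target>
              <!-- same configure/make sequence as the Ant target above -->
              <mkdir dir="${project.build.directory}/native"/>
              <exec dir="${project.build.directory}/native"
                    executable="sh" failonerror="true">
                <arg line="${basedir}/src/main/native/configure"/>
              </exec>
              <exec dir="${project.build.directory}/native"
                    executable="make" failonerror="true"/>
            </target>
          </configuration>
        </execution>
      </executions>
    </plugin>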
-
-  <target name="compile-core"
-          depends="clover,compile-core-classes,
-  	compile-core-native" 
-  	description="Compile core only">
-  </target>
-
-  <target name="compile" depends="compile-core" description="Compile core">
-  </target>
-
-  <!-- ================================================================== -->
-  <!-- Make hadoop-common.jar                                               -->
-  <!-- ================================================================== -->
-  <!--                                                                    -->
-  <!-- ================================================================== -->
-  <target name="jar" depends="compile-core" description="Make hadoop-common.jar">
-    <tar compression="gzip" destfile="${build.classes}/bin.tgz">
-      <tarfileset dir="bin" mode="755"/>
-    </tar>
-    <property name="jar.properties.list" value="commons-logging.properties, hadoop-metrics.properties" />
-    <jar jarfile="${build.dir}/${final.name}.jar"
-         basedir="${build.classes}">
-      <service type="org.apache.hadoop.security.SecurityInfo">
-        <provider 
-           classname="org.apache.hadoop.security.AnnotatedSecurityInfo"/>
-      </service>
-      <manifest>
-        <section name="org/apache/hadoop">
-          <attribute name="Implementation-Title" value="${ant.project.name}"/>
-          <attribute name="Implementation-Version" value="${version}"/>
-          <attribute name="Implementation-Vendor" value="Apache"/>
-        </section>
-      </manifest>
-      <fileset dir="${conf.dir}" includes="${jar.properties.list}" />
-      <fileset file="${jar.extra.properties.list}" />
-    </jar>
-
-    <jar jarfile="${hadoop-common-sources.jar}">
-      <fileset dir="${java.src.dir}" includes="org/apache/hadoop/**/*.java"/>
-      <fileset dir="${build.src}" includes="org/apache/hadoop/**/*.java"/>
-    </jar>
-  </target>
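
The manifest attributes stamped into hadoop-common.jar here (Implementation-Title, Implementation-Version, Implementation-Vendor) map directly onto maven-jar-plugin archive configuration; a sketch (the Ant build writes them into a per-package manifest section, which Maven can also express via <manifestSections>):

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-jar-plugin</artifactId>
      <configuration>
        <archive>
          <manifestEntries>
            <Implementation-Title>${project.name}</Implementation-Title>
            <Implementation-Version>${project.version}</Implementation-Version>
            <Implementation-Vendor>Apache</Implementation-Vendor>
          </manifestEntries>
        </archive>
      </configuration>
    </plugin>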
-
-  <!-- ================================================================== -->
-  <!-- Make the Hadoop metrics jar. (for use outside Hadoop)              -->
-  <!-- ================================================================== -->
-  <!--                                                                    -->
-  <!-- ================================================================== -->
-  <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics jar. (for use outside Hadoop)">
-    <jar jarfile="${build.dir}/hadoop-metrics-${version}.jar"
-         basedir="${build.classes}">
-      <include name="**/metrics/**" />
-      <exclude name="**/package.html" />
-    </jar>
-  </target>
-
-  <target name="metrics2.jar" depends="compile-core" description="Make the Hadoop metrics2 framework jar (for use plugin development)">
-    <jar jarfile="${build.dir}/hadoop-metrics2-${version}.jar"
-         basedir="${build.classes}">
-      <include name="**/metrics2/**" />
-    </jar>
-  </target>
-
-  <target name="generate-test-records" depends="compile-rcc-compiler">
-    <recordcc destdir="${test.generated.dir}">
-      <fileset dir="${test.src.dir}"
-	         includes="**/*.jr" />
-    </recordcc>
-  </target>
-
-  <target name="generate-avro-records" depends="init, ivy-retrieve-test">
-    <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
-      <classpath refid="test.classpath"/>
-    </taskdef>
-    <schema destdir="${test.generated.dir}">
-      <fileset dir="${test.src.dir}">
-        <include name="**/*.avsc" />
-      </fileset>
-    </schema>
-  </target>
-
-  <target name="generate-avro-protocols" depends="init, ivy-retrieve-test">
-    <taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
-      <classpath refid="test.classpath"/>
-    </taskdef>
-    <schema destdir="${test.generated.dir}">
-      <fileset dir="${test.src.dir}">
-        <include name="**/*.avpr" />
-     </fileset>
-    </schema>
-  </target>
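
The two taskdef-based targets above generate Java test sources from Avro schema (*.avsc) and protocol (*.avpr) files. The Maven-era counterpart is the avro-maven-plugin; a sketch, with the phase binding and directories chosen for the test tree, all of it illustrative rather than read out of this diff:

    <plugin>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro-maven-plugin</artifactId>
      <executions>
        <execution>
          <phase>generate-test-sources</phase>
          <goals>
            <goal>schema</goal>     <!-- *.avsc -->
            <goal>protocol</goal>   <!-- *.avpr -->
          </goals>
          <configuration>
            <sourceDirectory>${basedir}/src/test/avro</sourceDirectory>
            <outputDirectory>${project.build.directory}/generated-test-sources/avro</outputDirectory>
          </configuration>
        </execution>
      </executions>
    </plugin>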
-
-  <!-- ================================================================== -->
-  <!-- Compile test code                                                  --> 
-  <!-- ================================================================== -->
-  <!-- This is a wrapper for fault-injection needs-->
-  <target name="-classes-compilation"
-    depends="compile-core-classes, compile-core-test"/> 
-
-  <target name="compile-core-test" depends="jar, ivy-retrieve-test, generate-test-records, generate-avro-records, generate-avro-protocols">
-    <mkdir dir="${test.core.build.classes}"/>
-    <javac 
-     encoding="${build.encoding}" 
-     srcdir="${test.generated.dir}"
-     includes="org/apache/hadoop/**/*.java"
-     destdir="${test.core.build.classes}"
-     debug="${javac.debug}"
-     optimize="${javac.optimize}"
-     target="${javac.version}"
-     source="${javac.version}"
-     deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}"/>
-      <classpath refid="test.classpath"/>
-    </javac>
-    <javac 
-     encoding="${build.encoding}" 
-     srcdir="${test.src.dir}/core"
-     includes="org/apache/hadoop/**/*.java"
-     destdir="${test.core.build.classes}"
-     debug="${javac.debug}"
-     optimize="${javac.optimize}"
-     target="${javac.version}"
-     source="${javac.version}"
-     deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args} ${javac.args.warnings}" />
-      <classpath refid="test.classpath"/>
-     </javac>
-
-    <taskdef
-       name="paranamer" 
-       classname="com.thoughtworks.paranamer.ant.ParanamerGeneratorTask">
-      <classpath refid="classpath" />
-    </taskdef>
-    <paranamer sourceDirectory="${test.src.dir}/core"
-	       outputDirectory="${test.core.build.classes}"/>
-
-    <delete dir="${test.cache.data}"/>
-    <mkdir dir="${test.cache.data}"/>
-    <copy file="${test.src.dir}/core/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
-
-  </target>
-
-  <!-- ================================================================== -->
-  <!-- Make hadoop-test.jar                                               -->
-  <!-- ================================================================== -->
-  <!--                                                                    -->
-  <!-- ================================================================== -->
-  <target name="jar-test" depends="compile-core-test" description="Make hadoop-test.jar">
-    <copy todir="${test.build.classes}">
-      <fileset dir="${test.core.build.classes}"/>
-    </copy>
-    <jar jarfile="${build.dir}/${test.final.name}.jar"
-         basedir="${test.build.classes}">
-         <manifest>
-           <attribute name="Main-Class"
-                      value="org/apache/hadoop/test/CoreTestDriver"/>
-          <section name="org/apache/hadoop">
-            <attribute name="Implementation-Title" value="${ant.project.name}"/>
-            <attribute name="Implementation-Version" value="${version}"/>
-            <attribute name="Implementation-Vendor" value="Apache"/>
-          </section>
-         </manifest>
-    </jar>
-
-    <jar jarfile="${hadoop-common-test-sources.jar}">
-      <fileset dir="${test.generated.dir}" includes="org/apache/hadoop/**/*.java"/>
-      <fileset dir="${test.src.dir}/core" includes="org/apache/hadoop/**/*.java"/>
-    </jar>
-  </target>
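
jar-test packages the test classes as a separate hadoop-common-test jar so other components can reuse the test utilities. Maven models this as a test-jar attachment on the main artifact; a sketch:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-jar-plugin</artifactId>
      <executions>
        <execution>
          <goals>
            <!-- attaches hadoop-common-<version>-tests.jar -->
            <goal>test-jar</goal>
          </goals>
        </execution>
      </executions>
    </plugin>

Downstream modules would then declare the dependency with <type>test-jar</type> instead of depending on a separately named hadoop-common-test artifact.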
-
-  <!-- ================================================================== -->
-  <!-- Fault injection customization section.
-       These targets ought to be copied over to other projects and modified
-       as needed -->
-  <!-- ================================================================== -->
-  <target name="run-test-core-fault-inject" depends="injectfaults" 
-	  description="Run full set of the unit tests with fault injection">
-    <macro-run-tests-fault-inject target.name="run-test-core"
-      testcasesonly="false"/>
-  </target>
-
-  <target name="jar-test-fault-inject" depends="injectfaults" 
-    description="Make hadoop-test-fi.jar">
-    <macro-jar-test-fault-inject
-      target.name="jar-test"
-      jar.final.name="test.final.name"
-      jar.final.value="${test.final.name}-fi" />
-  </target>
-
-  <target name="jar-fault-inject" depends="injectfaults" 
-    description="Make hadoop-fi.jar">
-    <macro-jar-fault-inject
-      target.name="jar"
-      build.dir="${build-fi.dir}"
-      jar.final.name="final.name"
-      jar.final.value="${final.name}-fi" />
-  </target>
-
-  <!--This target is not included in the top-level list of targets
-  because it serves a special "regression" testing purpose: running
-  non-FI tests in an FI environment -->
-  <target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
-    <fail unless="testcase">Can't run this target without -Dtestcase setting!
-    </fail>
-    <macro-run-tests-fault-inject target.name="run-test-core" 
-      testcasesonly="true"/>
-  </target>
-  <!-- ================================================================== -->
-  <!-- End of Fault injection customization section                       -->
-  <!-- ================================================================== -->
-
-  <condition property="tests.notestcase">
-    <and>
-      <isfalse value="${test.fault.inject}"/>
-      <not>
-        <isset property="testcase"/>
-      </not>
-    </and>
-  </condition>
-  <condition property="tests.notestcase.fi">
-    <and>
-      <not>
-        <isset property="testcase" />
-      </not>
-      <istrue value="${test.fault.inject}" />
-    </and>
-  </condition>
-  <condition property="tests.testcase">
-    <and>
-      <isfalse value="${test.fault.inject}" />
-      <isset property="testcase" />
-    </and>
-  </condition>
-  <condition property="tests.testcaseonly">
-    <istrue value="${special.fi.testcasesonly}" />
-  </condition>
-  <condition property="tests.testcase.fi">
-    <and>
-      <istrue value="${test.fault.inject}" />
-      <isset property="testcase" />
-      <isfalse value="${special.fi.testcasesonly}" />
-    </and>
-  </condition>
-	     
-  <!-- ================================================================== -->
-  <!-- Run unit tests                                                     --> 
-  <!-- ================================================================== -->
-  <macrodef name="macro-test-runner">
-    <attribute name="test.file" />
-    <attribute name="classpath" />
-    <attribute name="test.dir" />
-    <attribute name="fileset.dir" />
-    <attribute name="hadoop.conf.dir.deployed" default="" />
-    <attribute name="test.krb5.conf.filename" default="" />
-    <sequential>
-      <delete file="${test.build.dir}/testsfailed"/>
-      <delete dir="@{test.dir}/data" />
-      <mkdir dir="@{test.dir}/data" />
-      <delete dir="${test.build.webapps}"/>
-      <copy todir="${test.build.webapps}">
-        <fileset dir="${test.src.dir}/test-webapps" includes="**/*" />
-      </copy>
-      <delete dir="@{test.dir}/logs" />
-      <mkdir dir="@{test.dir}/logs" />
-      <copy file="${test.src.dir}/hadoop-policy.xml"
-            todir="@{test.dir}/extraconf" />
-      <copy file="${test.src.dir}/fi-site.xml"
-            todir="@{test.dir}/extraconf" />
-      <junit showoutput="${test.output}"
-             printsummary="${test.junit.printsummary}"
-             haltonfailure="${test.junit.haltonfailure}"
-             fork="yes"
-             forkmode="${test.junit.fork.mode}"
-             maxmemory="${test.junit.maxmemory}"
-             dir="${basedir}"
-             timeout="${test.timeout}"
-             errorProperty="tests.failed"
-             failureProperty="tests.failed">
-        <jvmarg value="-ea" />
-        <sysproperty key="java.net.preferIPv4Stack" value="true"/>
-        <sysproperty key="test.build.data" value="${test.build.data}" />
-        <sysproperty key="test.cache.data" value="${test.cache.data}" />
-        <sysproperty key="test.debug.data" value="${test.debug.data}" />
-        <sysproperty key="hadoop.log.dir" value="${test.log.dir}" />
-        <sysproperty key="test.src.dir" value="${test.src.dir}" />
-        <sysproperty key="test.build.extraconf" value="@{test.dir}/extraconf" />
-         <sysproperty key="java.security.krb5.conf" value="@{test.krb5.conf.filename}"/>
-        <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
-        <sysproperty key="java.library.path"
-          value="${build.native}/lib:${lib.dir}/native/${build.platform}:${snappy.lib}"/>
-        <sysproperty key="java.security.egd" value="file:///dev/urandom" />
-        <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
-
-        <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
-        <syspropertyset dynamic="no">
-          <propertyref name="io.compression.codec.lzo.class"/>
-        </syspropertyset>
-        <!-- set compile.c++ in the child jvm only if it is set -->
-        <syspropertyset dynamic="no">
-          <propertyref name="compile.c++"/>
-        </syspropertyset>
-        <classpath refid="@{classpath}" />
-        <!-- Pass probability specifications to the spawned JVM -->
-        <syspropertyset id="FaultProbabilityProperties">
-          <propertyref regex="fi.*"/>
-        </syspropertyset>
-        <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
-                     value="@{hadoop.conf.dir.deployed}" />
-        <!-- user to group mapping class for TestAccessControlList -->
-        <syspropertyset dynamic="no">
-          <propertyref name="TestAccessControlListGroupMapping"/>
-        </syspropertyset>
-        <formatter type="${test.junit.output.format}" />
-        <batchtest todir="@{test.dir}" if="tests.notestcase">
-          <fileset dir="@{fileset.dir}/core"
-                   excludes="**/${test.exclude}.java aop/** system/**">
-             <patternset>
-               <includesfile name="@{test.file}"/>
-               <excludesfile name="${test.exclude.file}"/>
-             </patternset>
-         </fileset>
-        </batchtest>
-        <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
-          <fileset dir="@{fileset.dir}/aop"
-                   includes="**/${test.include}.java"
-                   excludes="**/${test.exclude}.java"
-                   excludesfile="${test.exclude.file}" />
-         </batchtest>
-         <batchtest todir="@{test.dir}" if="tests.testcase">
-           <fileset dir="@{fileset.dir}/core"
-             includes="**/${testcase}.java" excludes="aop/** system/**"/>
-         </batchtest>
-         <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
-           <fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java" />
-         </batchtest>
-         <!--The following batch is for very special occasions only, when
-                non-FI tests need to be executed against an FI environment -->
-         <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
-           <fileset dir="@{fileset.dir}/core" includes="**/${testcase}.java" />
-         </batchtest>
-      </junit>
-      <antcall target="checkfailure"/>
-    </sequential>
-  </macrodef>
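
Most of macro-test-runner, the forked JVMs, per-test fork mode, heap ceiling, timeout, and the long list of <sysproperty> entries, collapses into maven-surefire-plugin configuration. A sketch covering a few of the knobs above, with values carried over from the Ant properties (test.junit.maxmemory=512m, test.timeout=900000 ms); the actual values in hadoop-common/pom.xml may differ:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-surefire-plugin</artifactId>
      <configuration>
        <forkMode>always</forkMode>      <!-- Ant's forkmode="perTest" -->
        <argLine>-Xmx512m -ea</argLine>  <!-- maxmemory plus the -ea jvmarg -->
        <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
        <systemPropertyVariables>
          <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
          <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
          <test.build.data>${project.build.directory}/test/data</test.build.data>
        </systemPropertyVariables>
      </configuration>
    </plugin>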
-
-  <target name="run-test-core" depends="compile-core-test" description="Run core unit tests">
-    <macro-test-runner test.file="${test.all.tests.file}"
-                       classpath="${test.classpath.id}"
-                       test.dir="${test.build.dir}"
-                       fileset.dir="${test.src.dir}"
-                       test.krb5.conf.filename="${test.src.dir}/krb5.conf"
-                       >
-    </macro-test-runner>
-  </target>   
-
-  <target name="checkfailure" if="tests.failed">
-    <touch file="${test.build.dir}/testsfailed"/>
-    <fail unless="continueOnFailure">Tests failed!</fail>
-  </target>
-
-  <target name="test-core" description="Run core unit tests">
-    <delete file="${test.build.dir}/testsfailed"/> 
-    <property name="continueOnFailure" value="true"/> 
-    <antcall target="run-test-core"/>
-    <antcall target="run-test-core-fault-inject"/>
-    <available file="${test.build.dir}/testsfailed" property="testsfailed"/>
-    <fail if="testsfailed">Tests failed!</fail> 
-  </target>
-
-  <target name="test" depends="jar-test,test-core" description="Run all unit tests"/>
-
-  <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
-  <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
-    <antcall target="test">
-      <param name="test.include" value="*"/>
-      <param name="test.classpath.id" value="test.cluster.classpath"/>
-    </antcall>
-  </target>
-
-  <target name="nightly" depends="test, tar">
-  </target>
-	
-  <!-- ================================================================== -->
-  <!-- Run optional third-party tool targets                              --> 
-  <!-- ================================================================== -->
-  <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" 
-       description="Run optional third-party tool targets">
-       <taskdef resource="checkstyletask.properties">
-         <classpath refid="checkstyle-classpath"/>
-       </taskdef>
-  
-	<mkdir dir="${test.build.dir}"/>
-  	
-  	<checkstyle config="${test.src.dir}/checkstyle.xml"
-  		failOnViolation="false">
-      <fileset dir="${java.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
-      <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
-  	</checkstyle>
-  	
-  	<xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
-        in="${test.build.dir}/checkstyle-errors.xml"
-        out="${test.build.dir}/checkstyle-errors.html"/>
-  </target>
-	
-  <target name="check-for-checkstyle">
-    <available property="checkstyle.present" resource="checkstyletask.properties">
-       <classpath refid="checkstyle-classpath"/>
-    </available>  	
-  </target>
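
The optional checkstyle target also has a stock Maven counterpart, and the commit keeps the ruleset as hadoop-common/dev-support/checkstyle.xml (entry 38 in the file list). A sketch, assuming the maven-checkstyle-plugin:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-checkstyle-plugin</artifactId>
      <configuration>
        <configLocation>dev-support/checkstyle.xml</configLocation>
        <!-- mirrors failOnViolation="false" above -->
        <failOnViolation>false</failOnViolation>
      </configuration>
    </plugin>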
-
-
- <property name="findbugs.home" value=""/>
-  <target name="findbugs" depends="check-for-findbugs, jar" if="findbugs.present" description="Run findbugs if present">
-    <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
-    <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
-    <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
-    <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
-    <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
-        classpath="${findbugs.home}/lib/findbugs-ant.jar" />
-
-        <mkdir dir="${findbugs.out.dir}"/>
-
-    <findbugs home="${findbugs.home}" output="xml:withMessages"
-        outputFile="${findbugs.report.xmlfile}" effort="max"
-        excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
-      <auxClasspath>
-        <fileset dir="${env.ANT_HOME}/lib">
-          <include name="ant.jar"/>
-          <include name="ant-launcher.jar"/>
-        </fileset>
-        <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common">
-          <include name="**/*.jar"/>
-        </fileset>
-      </auxClasspath>
-      <sourcePath path="${java.src.dir}"/>
-      <class location="${basedir}/build/${final.name}.jar" />
-    </findbugs>
-
-        <xslt style="${findbugs.home}/src/xsl/default.xsl"
-        in="${findbugs.report.xmlfile}"
-        out="${findbugs.report.htmlfile}"/>
-  </target>
-	
-  <target name="check-for-findbugs">
-    <available property="findbugs.present"
-        file="${findbugs.home}/lib/findbugs.jar" />
-  </target>
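
Likewise for findbugs: the exclude filter survives as hadoop-common/dev-support/findbugsExcludeFile.xml (entry 39 in the file list), and report generation typically moves to the Codehaus findbugs-maven-plugin; a sketch:

    <plugin>
      <groupId>org.codehaus.mojo</groupId>
      <artifactId>findbugs-maven-plugin</artifactId>
      <configuration>
        <excludeFilterFile>dev-support/findbugsExcludeFile.xml</excludeFilterFile>
        <effort>Max</effort>        <!-- effort="max" above -->
        <xmlOutput>true</xmlOutput>
      </configuration>
    </plugin>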
-
-
-  <!-- ================================================================== -->
-  <!-- Documentation                                                      -->
-  <!-- ================================================================== -->
-  
-  <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. 
-       To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
-    <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
-	  failonerror="true">
-    </exec>
-    <copy todir="${build.docs}">
-      <fileset dir="${docs.src}/build/site/" />
-    </copy>
-    <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
-    <style basedir="${java.src.dir}" destdir="${build.docs}"
-           includes="core-default.xml" style="conf/configuration.xsl"/>
-    <antcall target="changes-to-html"/>
-  </target>
-
-  <target name="forrest.check" unless="forrest.home">
-    <fail message="'forrest.home' is not defined. Please pass 
-      -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
-  </target>
-
-  <target name="javadoc-dev" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc for hadoop developers">
-    <mkdir dir="${build.javadoc.dev}"/>
-    <javadoc
-      overview="${java.src.dir}/overview.html"
-      packagenames="org.apache.hadoop.*"
-      destdir="${build.javadoc.dev}"
-      author="true"
-      version="true"
-      use="true"
-      windowtitle="${Name} ${version} API"
-      doctitle="${Name} ${version} Developer API"
-      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
-      maxmemory="${javadoc.maxmemory}">
-        <packageset dir="${java.src.dir}"/>
-
-        <link href="${javadoc.link.java}"/>
-
-        <classpath >
-          <path refid="classpath" />
-          <path refid="javadoc-classpath"/>
-          <pathelement path="${java.class.path}"/>
-          <pathelement location="${build.tools}"/>
-        </classpath>
-
-    	<group title="Core" packages="org.apache.*"/>
-
-    </javadoc>
-  </target>	
-
-  <target name="javadoc-uptodate" depends="compile, ivy-retrieve-javadoc">
-    <uptodate property="javadoc.is.uptodate">
-      <srcfiles dir="${src.dir}">
-        <include name="**/*.java" />
-        <include name="**/*.html" />
-      </srcfiles>
-      <mapper type="merge" to="${build.javadoc.timestamp}" />
-    </uptodate>
-  </target>
- 
-  <target name="javadoc" description="Generate javadoc" depends="jar, javadoc-uptodate"
-       unless="javadoc.is.uptodate">
-    <mkdir dir="${build.javadoc}"/>
-    <javadoc
-      overview="${java.src.dir}/overview.html"
-      packagenames="org.apache.hadoop.*"
-      destdir="${build.javadoc}"
-      author="true"
-      version="true"
-      use="true"
-      windowtitle="${Name} ${version} API"
-      doctitle="${Name} ${version} API"
-      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
-      maxmemory="${javadoc.maxmemory}">
-        <packageset dir="${java.src.dir}"/>
-	
-        <link href="${javadoc.link.java}"/>
-
-        <classpath >
-          <path refid="classpath" />
-          <path refid="javadoc-classpath"/>
-          <pathelement path="${java.class.path}"/>
-          <pathelement location="${build.tools}"/>
-        </classpath>
-
-       <group title="Core" packages="org.apache.*"/>
-       <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet"
-               path="${build.dir}/${final.name}.jar"/>
-    </javadoc>
-  </target>	
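
The custom doclet hookup above survives the migration: ExcludePrivateAnnotationsStandardDoclet now lives in the new hadoop-annotations module, and the maven-javadoc-plugin can load it via <doclet>/<docletArtifact>; a sketch:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-javadoc-plugin</artifactId>
      <configuration>
        <doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
        <docletArtifact>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-annotations</artifactId>
          <version>0.23.0-SNAPSHOT</version>
        </docletArtifact>
      </configuration>
    </plugin>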
-
-  <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null">
-    <javadoc maxmemory="${javadoc.maxmemory}">
-       <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-               path="${build.dir}/${final.name}.jar:${jdiff.jar}:${xerces.jar}">
-         <param name="-apidir" value="${jdiff.xml.dir}"/>
-         <param name="-apiname" value="hadoop-core ${version}"/>
-         <param name="${jdiff.stability}"/>
-       </doclet>
-       <packageset dir="src/java"/>
-       <classpath >
-         <path refid="classpath" />
-         <path refid="jdiff-classpath" />
-         <pathelement path="${java.class.path}"/>
-       </classpath>
-    </javadoc>
-  </target>
-	
-  <target name="write-null">
-	<exec executable="touch">
-	   <arg value="${jdiff.home}/Null.java"/>
-        </exec>
-  </target> 
-
-  <target name="api-report" depends="ivy-retrieve-jdiff,api-xml">
-    <mkdir dir="${jdiff.build.dir}"/>
-    <javadoc sourcepath="src/java"
-             destdir="${jdiff.build.dir}"
-	     sourceFiles="${jdiff.home}/Null.java"
-	     maxmemory="${javadoc.maxmemory}">
-       <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-              path="${build.dir}/${final.name}.jar:${jdiff.jar}:${xerces.jar}">
-         <param name="-oldapi" value="hadoop-core ${jdiff.stable}"/>
-         <param name="-newapi" value="hadoop-core ${version}"/>
-         <param name="-oldapidir" value="${jdiff.xml.dir}"/>
-         <param name="-newapidir" value="${jdiff.xml.dir}"/>
-         <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
-         <param name="-javadocnew" value="../../api/"/>
-         <param name="-stats"/>
-         <param name="${jdiff.stability}"/>
-         <param name="${jdiff.compatibility}"/>
-       </doclet>
-       <classpath >
-         <path refid="classpath" />
-         <path refid="jdiff-classpath"/>
-         <pathelement path="${java.class.path}"/>
-       </classpath>
-    </javadoc>
-  </target>
-	
-  <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
-    <mkdir dir="${build.docs}"/>
-    <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
-      <arg value="${changes.src}/changes2html.pl"/>
-    </exec>
-    <copy todir="${build.docs}">
-      <fileset dir="${changes.src}" includes="*.css"/>
-    </copy>
-  </target>
-
-  <!-- ================================================================== -->
-  <!-- D I S T R I B U T I O N                                            -->
-  <!-- ================================================================== -->
-  <!--                                                                    -->
-  <!-- ================================================================== -->
-  <target name="package" depends="compile, jar, javadoc, docs, api-report, create-native-configure, jar-test"
-	  description="Build distribution">
-    <mkdir dir="${dist.dir}"/>
-    <mkdir dir="${dist.dir}/lib"/>
-    <mkdir dir="${dist.dir}/libexec"/>
-    <mkdir dir="${dist.dir}/bin"/>
-    <mkdir dir="${dist.dir}/docs"/>
-    <mkdir dir="${dist.dir}/docs/api"/>
-    <mkdir dir="${dist.dir}/docs/jdiff"/>
-
-    <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
-      <fileset dir="${common.ivy.lib.dir}"/>
-    </copy>
-
-    <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
-      <fileset dir="lib">
-        <exclude name="**/native/**"/>
-      </fileset>
-    </copy>
-
-  	<exec dir="${dist.dir}" executable="sh" failonerror="true">
-	  <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
-	  <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
-	  <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
-          <env key="BUNDLE_SNAPPY_LIB" value="${bundle.snappy}"/>
-          <env key="SNAPPY_LIB_DIR" value="${snappy.prefix}/lib"/>
-	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
-    </exec>
-
-    <copy todir="${dist.dir}/webapps">
-      <fileset dir="${build.webapps}"/>
-    </copy>
-
-    <copy todir="${dist.dir}"> 
-      <fileset file="${build.dir}/${final.name}.jar"/>
-      <fileset file="${build.dir}/${test.final.name}.jar"/>
-    </copy>
-    
-    <copy todir="${dist.dir}/bin">
-      <fileset dir="bin"/>
-    </copy>
-
-    <copy todir="${dist.dir}/conf">
-      <fileset dir="${conf.dir}" excludes="**/*.template"/>
-    </copy>
-
-    <copy todir="${dist.dir}/docs">
-      <fileset dir="${build.docs}"/>
-    </copy>
-
-    <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
-
-    <copy todir="${dist.dir}/ivy">
-      <fileset dir="ivy"/>
-    </copy>
-
-    <copy todir="${dist.dir}/libexec">
-      <fileset dir="bin">
-        <include name="hadoop-config.sh"/>
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}">
-      <fileset dir=".">
-        <include name="*.txt" />
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/src" includeEmptyDirs="true">
-      <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
-    </copy>
-  	
-    <copy todir="${dist.dir}/" file="build.xml"/>
-
-    <chmod perm="ugo+x" file="${dist.dir}/src/native/configure"/>
-    <chmod perm="ugo+x" type="file" parallel="false">
-        <fileset dir="${dist.dir}/bin"/>
-    </chmod>
-
-  </target>
-
-  <!-- ================================================================== -->
-  <!-- Make release tarball                                               -->
-  <!-- ================================================================== -->
-  <target name="tar" depends="package" description="Make release tarball">
-    <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
-      <param.listofitems>
-        <tarfileset dir="${build.dir}" mode="664">
-          <exclude name="${final.name}/bin/*" />
-          <exclude name="${final.name}/src/native/configure" />
-          <include name="${final.name}/**" />
-        </tarfileset>
-        <tarfileset dir="${build.dir}" mode="755">
-          <include name="${final.name}/bin/*" />
-          <include name="${final.name}/src/native/configure" />
-        </tarfileset>
-      </param.listofitems>
-    </macro_tar>
-  </target>
-
-  <target name="bin-package" depends="compile, jar, jar-test, javadoc" 
-		description="assembles artifacts for binary target">
-    <mkdir dir="${dist.dir}"/>
-    <mkdir dir="${dist.dir}/lib"/>
-    <mkdir dir="${dist.dir}/${package.share.dir}/contrib"/>
-    <mkdir dir="${dist.dir}/${package.share.dir}/templates"/>
-    <mkdir dir="${dist.dir}/${package.share.dir}/webapps"/>
-    <mkdir dir="${dist.dir}/bin"/>
-    <mkdir dir="${dist.dir}/libexec"/>
-    <mkdir dir="${dist.dir}/sbin"/>
-    <mkdir dir="${dist.dir}/var/log"/>
-    <mkdir dir="${dist.dir}/var/run"/>
-
-    <copy todir="${dist.dir}/${package.share.dir}/lib" includeEmptyDirs="false" flatten="true">
-      <fileset dir="${common.ivy.lib.dir}"/>
-    </copy>
-
-    <copy todir="${dist.dir}/${package.share.dir}" includeEmptyDirs="false">
-      <fileset dir="lib">
-        <exclude name="**/native/**"/>
-      </fileset>
-    </copy>
-
-    <exec dir="${dist.dir}" executable="sh" failonerror="true">
-      <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
-      <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
-      <env key="DIST_LIB_DIR" value="${dist.dir}/lib"/>
-      <env key="BUNDLE_SNAPPY_LIB" value="${bundle.snappy}"/>
-      <env key="SNAPPY_LIB_DIR" value="${snappy.prefix}/lib"/>
-      <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
-    </exec>
-
-    <copy todir="${dist.dir}/${package.share.dir}"> 
-      <fileset file="${build.dir}/*.jar"/>
-    </copy>
-    
-    <copy todir="${dist.dir}/bin">
-      <fileset dir="bin">
-        <include name="hadoop"/>
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/libexec">
-      <fileset dir="bin">
-        <include name="hadoop-config.sh"/>
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/sbin">
-      <fileset dir="bin">
-        <include name="hadoop-daemon.sh"/>
-        <include name="hadoop-daemons.sh"/>
-        <include name="slaves.sh"/>
-        <include name="start-all.sh"/>
-        <include name="stop-all.sh"/>
-      </fileset>
-      <fileset dir="${basedir}/src/packages">
-        <include name="*.sh" />
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/etc/hadoop">
-      <fileset dir="${conf.dir}" excludes="**/*.template"/>
-      <fileset dir="${conf.dir}" includes="hadoop-env.sh.template"/>
-    </copy>
-
-    <copy todir="${dist.dir}/${package.share.dir}/templates">
-      <fileset dir="${basedir}/src/packages/templates/conf" includes="*"/>
-    </copy>
-
-    <copy todir="${dist.dir}/share/doc/hadoop/${module}">
-      <fileset dir=".">
-        <include name="*.txt" />
-      </fileset>
-    </copy>
-
-    <chmod perm="ugo+x" type="file" parallel="false">
-        <fileset dir="${dist.dir}/bin"/>
-        <fileset dir="${dist.dir}/sbin"/>
-    </chmod>
-  </target>
-
-  <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
-    <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
-      <param.listofitems>
-        <tarfileset dir="${build.dir}" mode="664">
-          <exclude name="${final.name}/bin/*" />
-          <exclude name="${final.name}/libexec/*" />
-          <exclude name="${final.name}/sbin/*" />
-          <exclude name="${final.name}/src/**" />
-          <exclude name="${final.name}/docs/**" />
-          <include name="${final.name}/**" />
-        </tarfileset>
-        <tarfileset dir="${build.dir}" mode="755">
-          <include name="${final.name}/bin/*" />
-          <include name="${final.name}/libexec/*" />
-          <include name="${final.name}/sbin/*" />
-        </tarfileset>
-      </param.listofitems>
-    </macro_tar>
-  </target>
-
-  <target name="rpm" depends="binary" description="Make rpm package">
-    <mkdir dir="${package.buildroot}/BUILD" />
-    <mkdir dir="${package.buildroot}/RPMS" />
-    <mkdir dir="${package.buildroot}/SRPMS" />
-    <mkdir dir="${package.buildroot}/SOURCES" />
-    <mkdir dir="${package.buildroot}/SPECS" />
-    <copy todir="${package.buildroot}/SOURCES">
-      <fileset dir="${build.dir}">
-        <include name="${final.name}-bin.tar.gz" />
-      </fileset>
-    </copy>
-    <copy file="${src.dir}/packages/rpm/spec/hadoop.spec" todir="${package.buildroot}/SPECS">
-      <filterchain>
-        <replacetokens>
-          <token key="final.name" value="${final.name}" />
-          <token key="version" value="${_version}" />
-          <token key="package.release" value="${package.release}" />
-          <token key="package.build.dir" value="${package.build.dir}" />
-          <token key="package.prefix" value="${package.prefix}" />
-          <token key="package.conf.dir" value="${package.conf.dir}" />
-          <token key="package.log.dir" value="${package.log.dir}" />
-          <token key="package.pid.dir" value="${package.pid.dir}" />
-          <token key="package.var.dir" value="${package.var.dir}" />
-        </replacetokens>
-      </filterchain>
-    </copy>
-    <rpm specFile="hadoop.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
-    <copy todir="${build.dir}/" flatten="true">
-      <fileset dir="${package.buildroot}/RPMS">
-        <include name="**/*.rpm" />
-      </fileset>
-    </copy>
-    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
-  </target>
-
-  <target name="deb" depends="ivy-retrieve-package, binary" description="Make deb package">
-    <taskdef name="deb"
-           classname="org.vafer.jdeb.ant.DebAntTask">
-      <classpath refid="ivy-package.classpath" />
-    </taskdef>
-
-    <mkdir dir="${package.build.dir}/hadoop.control" />
-    <mkdir dir="${package.buildroot}/${package.prefix}" />
-    <copy todir="${package.buildroot}/${package.prefix}">
-      <fileset dir="${build.dir}/${final.name}">
-        <include name="**" />
-      </fileset>
-    </copy>
-    <copy todir="${package.build.dir}/hadoop.control">
-      <fileset dir="${src.dir}/packages/deb/hadoop.control">
-        <exclude name="control" />
-      </fileset>
-    </copy>
-    <copy file="${src.dir}/packages/deb/hadoop.control/control" todir="${package.build.dir}/hadoop.control">
-      <filterchain>
-        <replacetokens>
-          <token key="final.name" value="${final.name}" />
-          <token key="version" value="${_version}" />
-          <token key="package.release" value="${package.release}" />
-          <token key="package.build.dir" value="${package.build.dir}" />
-          <token key="package.prefix" value="${package.prefix}" />
-          <token key="package.conf.dir" value="${package.conf.dir}" />
-          <token key="package.log.dir" value="${package.log.dir}" />
-          <token key="package.pid.dir" value="${package.pid.dir}" />
-        </replacetokens>
-      </filterchain>
-    </copy>
-    <deb destfile="${package.buildroot}/${name}_${_version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
-      <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
-        <exclude name="bin" />
-        <exclude name="etc" />
-        <exclude name="libexec" />
-        <exclude name="etc/**" />
-        <exclude name="sbin" />
-        <include name="**" />
-      </tarfileset>
-      <tarfileset dir="${build.dir}/${final.name}/bin" filemode="755" prefix="${package.prefix}/bin">
-        <include name="*" />
-      </tarfileset>
-      <tarfileset dir="${build.dir}/${final.name}/libexec" filemode="755" prefix="${package.prefix}/libexec">
-        <include name="*" />
-      </tarfileset>
-      <tarfileset dir="${build.dir}/${final.name}/sbin" filemode="755" prefix="${package.prefix}/sbin">
-        <include name="*" />
-      </tarfileset>
-      <tarfileset dir="${src.dir}/packages" filemode="755" prefix="${package.prefix}/sbin">
-        <include name="*.sh" />
-      </tarfileset>
-      <tarfileset dir="${build.dir}/${final.name}/etc/hadoop" filemode="644" prefix="${package.conf.dir}">
-        <exclude name="core-site.xml" />
-        <exclude name="hdfs-site.xml" />
-        <exclude name="mapred-site.xml" />
-        <include name="**" /> 
-      </tarfileset>
-    </deb>
-    <copy todir="${build.dir}/" flatten="true">
-      <fileset dir="${package.buildroot}">
-        <include name="**/hadoop*.deb" />
-      </fileset>
-    </copy>
-    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
-  </target>
-  
-  <target name="ant-task-download" description="To download mvn-ant-task" unless="offline">
-    <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
-  </target>
-
-  <target name="mvn-taskdef" depends="ant-task-download">
-     <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/> 
-     <typedef resource="org/apache/maven/artifact/ant/antlib.xml" 
-         uri="urn:maven-artifact-ant"
-         classpathref="mvn-ant-task.classpath"/>
-  </target>   
-
-  <target name="mvn-install" depends="mvn-taskdef,jar,jar-test,set-version"
-    description="Install hadoop common and test jars to local fs m2 repo">
-     <artifact:pom file="${hadoop-common.pom}" id="hadoop.core"/>
-     <artifact:pom file="${hadoop-common-test.pom}" id="hadoop.core.test"/>
-     <artifact:install file="${hadoop-common.jar}">
-        <pom refid="hadoop.core"/>
-	<attach file="${hadoop-common-sources.jar}" classifier="sources" />
-     </artifact:install>
-     <artifact:install file="${hadoop-common-test.jar}">
-        <pom refid="hadoop.core.test"/>
-	<attach file="${hadoop-common-test-sources.jar}" classifier="sources" />
-     </artifact:install>
-  </target>
-
-  <target name="mvn-si-install" depends="mvn-install,-mvn-system-install"
-     description="Install system integration test jars as well"/>
-
-  <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test,
-     jar-system, set-version, signanddeploy, simpledeploy"
-     description="To deploy hadoop common and test jar's to apache
-     snapshot's repository"/>
-
-  <target name="signanddeploy" if="staging" depends="sign">
-     <artifact:pom file="${hadoop-common.pom}" id="hadoop.core"/>
-     <artifact:pom file="${hadoop-common-test.pom}" id="hadoop.core.test"/>
-     <artifact:pom file="${hadoop-common-instrumented.pom}" 
-       id="hadoop.core.${herriot.suffix}"/>
-     <artifact:install-provider artifactId="wagon-http"
-     version="${wagon-http.version}"/>
-
-     <artifact:deploy file="${hadoop-common.jar}">
-       <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
-       <pom refid="hadoop.core"/>
-       <attach file="${hadoop-common.jar}.asc" type="jar.asc"/>
-       <attach file="${hadoop-common.pom}.asc" type="pom.asc"/>
-       <attach file="${hadoop-common-sources.jar}.asc" type="jar.asc"
-         classifier="sources"/>
-       <attach file="${hadoop-common-sources.jar}" classifier="sources"/>
-     </artifact:deploy>
-
-     <artifact:deploy file="${hadoop-common-test.jar}">
-       <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
-       <pom refid="hadoop.core.test"/>
-       <attach file="${hadoop-common-test.jar}.asc" type="jar.asc"/>
-       <attach file="${hadoop-common-test.pom}.asc" type="pom.asc"/>
-       <attach file="${hadoop-common-test-sources.jar}.asc" type="jar.asc"
-         classifier="sources"/>
-       <attach file="${hadoop-common-test-sources.jar}" classifier="sources"/>
-     </artifact:deploy>
-
-     <artifact:deploy file="${hadoop-common-instrumented.jar}">
-       <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
-       <pom refid="hadoop.core.${herriot.suffix}"/>
-       <attach file="${hadoop-common-instrumented.jar}.asc" type="jar.asc"/>
-       <attach file="${hadoop-common-instrumented.pom}.asc" type="pom.asc"/>
-       <attach file="${hadoop-common-instrumented-sources.jar}.asc" 
-         type="jar.asc" classifier="sources"/>
-       <attach file="${hadoop-common-instrumented-sources.jar}"
-         classifier="sources"/>
-     </artifact:deploy>
-  </target>
-
-  <target name="sign" depends="clean-sign" if="staging">
-    <input message="password:>" addproperty="gpg.passphrase">
-     <handler classname="org.apache.tools.ant.input.SecureInputHandler" />
-    </input>
-    <macrodef name="sign-artifact" description="Signs the artifact">
-      <attribute name="input.file"/>
-      <attribute name="output.file" default="@{input.file}.asc"/>
-      <attribute name="gpg.passphrase"/>
-      <sequential>
-        <echo>Signing @{input.file} Sig File: @{output.file}</echo>
-        <exec executable="gpg" >
-          <arg value="--armor"/>
-          <arg value="--output"/>
-          <arg value="@{output.file}"/>
-          <arg value="--passphrase"/>
-          <arg value="@{gpg.passphrase}"/>
-          <arg value="--detach-sig"/>
-          <arg value="@{input.file}"/>
-        </exec>
-      </sequential>
-    </macrodef>
-    <sign-artifact input.file="${hadoop-common.jar}" 
-     output.file="${hadoop-common.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-test.jar}" 
-     output.file="${hadoop-common-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-sources.jar}" 
-     output.file="${hadoop-common-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-test-sources.jar}" 
-     output.file="${hadoop-common-test-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common.pom}" 
-     output.file="${hadoop-common.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-test.pom}" 
-     output.file="${hadoop-common-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-instrumented.jar}" 
-     output.file="${hadoop-common-instrumented.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-instrumented.pom}" 
-     output.file="${hadoop-common-instrumented.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact input.file="${hadoop-common-instrumented-sources.jar}" 
-     output.file="${hadoop-common-instrumented-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
-  </target>
-
-  <target name="simpledeploy" unless="staging">
-     <artifact:pom file="${hadoop-common.pom}" id="hadoop.core"/>
-     <artifact:pom file="${hadoop-common-test.pom}" id="hadoop.test"/>
-     <artifact:pom file="${hadoop-common-instrumented.pom}" 
-       id="hadoop.core.${herriot.suffix}"/>
-
-     <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
-     <artifact:deploy file="${hadoop-common.jar}">
-         <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
-         <pom refid="hadoop.core"/>
-         <attach file="${hadoop-common-sources.jar}" classifier="sources" />
-     </artifact:deploy>
-
-     <artifact:deploy file="${hadoop-common-test.jar}">
-         <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
-         <pom refid="hadoop.core.test"/>
-         <attach file="${hadoop-common-test-sources.jar}" classifier="sources" />
-     </artifact:deploy> 
-
-     <artifact:deploy file="${hadoop-common-instrumented.jar}">
-         <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
-         <pom refid="hadoop.core.${herriot.suffix}"/>
-         <attach file="${hadoop-common-instrumented-sources.jar}" classifier="sources" />
-     </artifact:deploy>
-  </target>
-
-  <target name="set-version">
-    <delete file="${basedir}/ivy/hadoop-common.xml"/>
-    <delete file="${basedir}/ivy/hadoop-common-test.xml"/>
-    <delete file="${basedir}/ivy/hadoop-common-${herriot.suffix}.xml"/>
-    <copy file="${basedir}/ivy/hadoop-common-template.xml" tofile="${basedir}/ivy/hadoop-common.xml"/>
-    <copy file="${basedir}/ivy/hadoop-common-test-template.xml" tofile="${basedir}/ivy/hadoop-common-test.xml"/>
-    <copy file="${basedir}/ivy/hadoop-common-${herriot.suffix}-template.xml"
-      tofile="${basedir}/ivy/hadoop-common-${herriot.suffix}.xml"/>
-    <replaceregexp byline="true">
-      <regexp pattern="@version"/>
-      <substitution expression="${version}"/>
-      <fileset dir="${basedir}/ivy">
-        <include name="hadoop-common.xml"/>
-        <include name="hadoop-common-test.xml"/>
-        <include name="hadoop-common-${herriot.suffix}.xml"/>
-      </fileset>
-    </replaceregexp>
-  </target>
-
-  <!-- ================================================================== -->
-  <!-- Perform audit activities for the release                           -->
-  <!-- ================================================================== -->
-  <target name="rats-taskdef" depends="ivy-retrieve-releaseaudit">
-     <typedef format="xml" resource="org/apache/rat/anttasks/antlib.xml" uri="antlib:org.apache.rat.anttasks"
-      classpathref="releaseaudit-classpath"/>
-  </target>
-
-  <target name="releaseaudit" depends="package, rats-taskdef" description="Release Audit activities">
-   <rat:report xmlns:rat="antlib:org.apache.rat.anttasks">
-      <fileset dir="${dist.dir}">
-        <exclude name="**/CHANGES.txt"/>
-        <exclude name="**/conf/*"/>
-        <exclude name="**/docs/"/>
-        <exclude name="lib/jdiff/"/>
-        <exclude name="**/native/*"/>
-        <exclude name="**/native/config/*"/>
-        <exclude name="**/native/m4/*"/>
-        <exclude name="**/VERSION"/>
-        <exclude name="**/*.json"/>
-        <exclude name="**/hod/*.txt"/>
-        <exclude name="src/test/empty-file" />
-      </fileset>
-    </rat:report>
-  </target>
-
-  <!-- ================================================================== -->
-  <!-- Clean.  Delete the build files and their directories               -->
-  <!-- ================================================================== -->
-  <target name="clean" depends="clean-sign, clean-fi" description="Clean.  Delete the build files and their directories">
-    <delete dir="${build.dir}"/>
-    <delete dir="${package.buildroot}"/>
-    <delete file="${basedir}/ivy/hadoop-common.xml"/>
-    <delete file="${basedir}/ivy/hadoop-common-pom.xml"/>
-    <delete file="${basedir}/ivy/hadoop-common-test.xml"/>
-    <delete file="${basedir}/ivy/hadoop-common-test-pom.xml"/>
-    <delete file="${basedir}/ivy/hadoop-common-${herriot.suffix}.xml"/>
-    <delete dir="${docs.src}/build"/>
-  </target>
-
-  <target name="clean-sign" description="Clean.  Delete .asc files">
-    <delete>
-      <fileset dir="." includes="**/**/*.asc"/>
-    </delete>
-  </target>  
-
-  <target name="veryclean" depends="clean" description="Delete mvn ant task jar and ivy ant taks jar">
-    <delete file="${ant_task.jar}"/>
-    <delete file="${ivy.jar}"/>
-  </target>
-
- <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. 
-     To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
-
-<target name="clover.setup" if="clover.enabled">
-   <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
-   <mkdir dir="${clover.db.dir}"/>
-   <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
-     <fileset dir="${src.dir}" includes="java/**/*"/>
-     <testsources dir="${test.src.dir}"/>
-   </clover-setup>
-</target>
-
-<target name="clover.info" unless="clover.present">
-  <echo>
-     Clover not found. Code coverage reports disabled.
-  </echo>
-</target>
-
-<target name="clover.check">
-  <fail unless="clover.present">
-  ##################################################################
-   Clover not found.
-   Please specify -Dclover.home=&lt;base of clover installation&gt;
-   on the command line.
-  ##################################################################
-  </fail>
-</target>
-
-<target name="generate-clover-reports" depends="clover.check, clover">
-  <mkdir dir="${clover.report.dir}"/>
-  <clover-report>
-     <current outfile="${clover.report.dir}" title="${final.name}">
-     <format type="html"/>
-     </current>
-  </clover-report>
-  <clover-report>
-     <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
-     <format type="xml"/>
-     </current>
-  </clover-report>
-</target>
-
-<target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
-  <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=&lt;base of Findbugs installation&gt; to Ant on the command-line." />
-</target>
-
-<target name="patch.check" unless="patch.file">
-  <fail message="'patch.file' is not defined. Please pass -Dpatch.file=&lt;location of patch file&gt; to Ant on the command-line." />
-</target>
-
-<target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
-  <exec executable="bash" failonerror="true">
-    <arg value="${basedir}/src/test/bin/test-patch.sh"/>
-    <arg value="DEVELOPER"/>
-    <arg value="${patch.file}"/>
-    <arg value="${scratch.dir}"/>
-    <arg value="${svn.cmd}"/>
-    <arg value="${grep.cmd}"/>
-    <arg value="${patch.cmd}"/>
-    <arg value="${findbugs.home}"/>
-    <arg value="${forrest.home}"/>
-    <arg value="${basedir}"/>
-  </exec>
-</target>
-
-<target name="hudson-test-patch" depends="findbugs.check,forrest.check">
-  <exec executable="bash" failonerror="true">
-    <arg value="${basedir}/src/test/bin/test-patch.sh"/>
-    <arg value="HUDSON"/>
-    <arg value="${scratch.dir}"/>
-    <arg value="${support.dir}"/>
-    <arg value="${ps.cmd}"/>
-    <arg value="${wget.cmd}"/>
-    <arg value="${jiracli.cmd}"/>
-    <arg value="${svn.cmd}"/>
-    <arg value="${grep.cmd}"/>
-    <arg value="${patch.cmd}"/>
-    <arg value="${findbugs.home}"/>
-    <arg value="${forrest.home}"/>
-    <arg value="${eclipse.home}"/>
-    <arg value="${basedir}"/>
-    <arg value="${jira.passwd}"/>
-    <arg value="${curl.cmd}"/>
-    <arg value="${defect}"/>
-  </exec>
-</target>
-	
-  <condition property="ant-eclipse.jar.exists">
-    <available file="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar"/>
-  </condition>
-
-  <target name="ant-eclipse-download" unless="ant-eclipse.jar.exists"
-          description="Downloads the ant-eclipse binary.">
-    <get src="http://downloads.sourceforge.net/project/ant-eclipse/ant-eclipse/1.0/ant-eclipse-1.0.bin.tar.bz2"
-         dest="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" usetimestamp="false" />
-
-    <untar src="${build.dir}/ant-eclipse-1.0.bin.tar.bz2"
-           dest="${build.dir}" compression="bzip2">
-      <patternset>
-        <include name="lib/ant-eclipse-1.0-jvm1.2.jar"/>
-      </patternset>
-    </untar>
-    <delete file="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" />
-  </target>
-
-  <target name="eclipse" 
-          depends="init,ant-eclipse-download,ivy-retrieve-common,ivy-retrieve-test,compile-core-test"
-          description="Create eclipse project files">
-
-    <property environment="env"/>
-
-    <!-- Locate the tools.jar which is part of the JDK -->
-    <condition property="jdk.tools.jar" value="${env.JDK_HOME}/lib/tools.jar">
-      <available file="${env.JDK_HOME}/lib/tools.jar"/>
-    </condition>
-    <condition property="jdk.tools.jar" value="${env.JAVA_HOME}/lib/tools.jar">
-      <available file="${env.JAVA_HOME}/lib/tools.jar"/>
-    </condition>
-    <condition property="jdk.tools.jar" value="${java.home}/../lib/tools.jar">
-      <available file="${java.home}/../lib/tools.jar"/>
-    </condition>
-
-    <!-- The tools.jar from the JDK is called classes.jar on OS X. -->
-    <condition property="jdk.tools.jar" value="${java.home}/bundle/Classes/classes.jar">
-      <available file="${java.home}/bundle/Classes/classes.jar"/>
-    </condition>  	
-
-    <pathconvert property="eclipse.project">
-      <path path="${basedir}"/>
-      <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
-    </pathconvert>
-    <taskdef name="eclipse"
-             classname="prantl.ant.eclipse.EclipseTask"
-             classpath="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar" />
-    <eclipse updatealways="true">
-      <project name="${eclipse.project}" />
-      <classpath>
-        <source path="${java.src.dir}"
-                output="${build.dir.eclipse-main-classes}" />
-        <source path="${test.src.dir}/core"
-                output="${build.dir.eclipse-test-classes}" />
-        <source path="${test.src.dir}/aop"
-                output="${build.dir.eclipse-test-classes}" />
-        <source path="${test.generated.dir}"
-                output="${build.dir.eclipse-test-generated-classes}" />
-        <output path="${build.dir.eclipse-main-classes}" />
-        <library pathref="ivy-common.classpath" exported="true" />
-        <library pathref="ivy-test.classpath" exported="false" />
-        <variable path="ANT_HOME/lib/ant.jar" exported="false" />
-        <library path="${conf.dir}" exported="false" />
-        <library path="${jdk.tools.jar}" exported="false" />
-      </classpath>
-    </eclipse>
-  </target>
-
-  <target name="ivy-init-dirs">
-    <mkdir dir="${build.ivy.dir}" />
-    <mkdir dir="${build.ivy.lib.dir}" />
-    <mkdir dir="${build.ivy.report.dir}" />
-    <mkdir dir="${build.ivy.maven.dir}" />
-  </target>
-
-  <target name="ivy-probe-antlib" >
-    <condition property="ivy.found">
-      <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
-    </condition>
-  </target>
-
-  <target name="ivy-download" description="To download ivy" unless="offline">
-    <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
-  </target>
-
-  <!--
-  To avoid Ivy leaking things across big projects, always load Ivy in the same classloader.
-  Note also that we skip loading Ivy if it is already present, just to make sure all is well.
-  -->
-  <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
-    <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
-      loaderRef="ivyLoader">
-      <classpath>
-        <pathelement location="${ivy.jar}"/>
-      </classpath>
-    </typedef>
-    <fail >
-      <condition >
-        <not>
-          <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
-        </not>
-      </condition>
-      You need Apache Ivy 2.0 or later from http://ant.apache.org/
-      It could not be loaded from ${ivy_repo_url}
-    </fail>
-  </target>
-
-  <property name="ivyresolvelog" value="download-only"/>
-  <property name="ivyretrievelog" value="quiet"/>
-
-  <target name="ivy-init" depends="ivy-init-antlib" >
-
-    <!--Configure Ivy by reading in the settings file.
-        If a settings file has already been read into this settings ID, that one takes priority.
-    -->
-    <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'
-      realm="Sonatype Nexus Repository Manager"/>
-
-  </target>
-
-  <target name="ivy-resolve" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
-    	log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-javadoc" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"
-    	log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-releaseaudit" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"
-  		log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-test" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test"
-    	log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-common" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common"
-    	log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-package" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="package"
-    	log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-jdiff" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff"
-    	log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-resolve-checkstyle" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"
-  		log="${ivyresolvelog}"/>
-  </target>
-
-  <target name="ivy-retrieve" depends="ivy-resolve"
-    description="Retrieve Ivy-managed artifacts">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-    		log="${ivyretrievelog}"/>
-  </target>
-
-  <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
-    description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-  			log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
-  </target>
-
-  <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
-    description="Retrieve Ivy-managed artifacts for the jdiff configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-  			log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
-  </target>
-
-  <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
-    description="Retrieve Ivy-managed artifacts for the javadoc configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-  			log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
-  </target>
-
-  <target name="ivy-retrieve-test" depends="ivy-resolve-test"
-    description="Retrieve Ivy-managed artifacts for the test configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-    		log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="ivy-test.classpath" conf="test"/>
-  </target>
-
-  <target name="ivy-retrieve-common" depends="ivy-resolve-common"
-    description="Retrieve Ivy-managed artifacts for the compile configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-    		log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
-  </target>
-
-  <target name="ivy-retrieve-package" depends="ivy-resolve-package"
-    description="Retrieve Ivy-managed artifacts for the package configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-    		log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="ivy-package.classpath" conf="package"/>
-  </target>
-
-  <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
-    description="Retrieve Ivy-managed artifacts for the compile configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
-    		log="${ivyretrievelog}"/>
-    <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
-  </target>
-
-  <target name="ivy-report" depends="ivy-resolve-releaseaudit"
-    description="Generate">
-    <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
-    <echo>
-      Reports generated: ${build.ivy.report.dir}
-    </echo>
-  </target>
-
-</project>
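
Almost everything in the build file above (install, deploy, staging, GPG signing) is machinery Maven provides natively, which is the point of this change. As a rough sketch only (the profile name and phase binding are illustrative assumptions, not taken from the new build; the plugin is the stock maven-gpg-plugin), the sign and signanddeploy targets collapse to a profile such as:

    <profile>
      <id>sign</id>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-gpg-plugin</artifactId>
            <executions>
              <execution>
                <id>sign-artifacts</id>
                <phase>verify</phase>
                <goals>
                  <!-- signs every attached artifact, so the .asc files
                       ride along with the jars, sources and POMs -->
                  <goal>sign</goal>
                </goals>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>

after which a single "mvn deploy" replaces the mvn-deploy, signanddeploy and simpledeploy targets in one pass.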

+ 0 - 24
common/conf/configuration.xsl

@@ -1,24 +0,0 @@
-<?xml version="1.0"?>
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
-<xsl:output method="html"/>
-<xsl:template match="configuration">
-<html>
-<body>
-<table border="1">
-<tr>
- <td>name</td>
- <td>value</td>
- <td>description</td>
-</tr>
-<xsl:for-each select="property">
-<tr>
-  <td><a name="{name}"><xsl:value-of select="name"/></a></td>
-  <td><xsl:value-of select="value"/></td>
-  <td><xsl:value-of select="description"/></td>
-</tr>
-</xsl:for-each>
-</table>
-</body>
-</html>
-</xsl:template>
-</xsl:stylesheet>
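
The stylesheet above renders a Hadoop configuration file as a three-column HTML table (name, value, description), with each property name doubling as an anchor via the <a name="{name}"> attribute template. A minimal input sketch (the property shown is purely illustrative):

    <?xml version="1.0"?>
    <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    <configuration>
      <property>
        <name>io.file.buffer.size</name>
        <value>4096</value>
        <description>Buffer size used while reading and writing files.</description>
      </property>
    </configuration>

would come out as a single table row whose name cell is addressable as #io.file.buffer.size.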

+ 0 - 8
common/conf/core-site.xml.template

@@ -1,8 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-
-</configuration>
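
When filled in, this template usually carried just a handful of site overrides. A sketch, assuming the fs.default.name key used by this generation of Hadoop and a placeholder hostname:

    <configuration>
      <property>
        <name>fs.default.name</name>
        <value>hdfs://namenode.example.com:9000</value>
      </property>
    </configuration>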

+ 0 - 66
common/conf/hadoop-env.sh.template

@@ -1,66 +0,0 @@
-# Set Hadoop-specific environment variables here.
-
-# The only required environment variable is JAVA_HOME.  All others are
-# optional.  When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.
-
-# The java implementation to use.  Required.
-export JAVA_HOME=${JAVA_HOME}
-
-# Hadoop Installation Prefix
-HADOOP_PREFIX=${HADOOP_PREFIX}
-
-# Hadoop Configuration Directory
-HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
-export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/conf}
-
-# Extra Java CLASSPATH elements.  Optional.
-# export HADOOP_CLASSPATH="<extra_entries>:$HADOOP_CLASSPATH"
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-# export HADOOP_HEAPSIZE=2000
-
-# Extra Java runtime options.  Empty by default.
-# if [ "$HADOOP_OPTS" == "" ]; then export HADOOP_OPTS=-server; else HADOOP_OPTS+=" -server"; fi
-
-# Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
-export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
-export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
-export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
-export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
-export HADOOP_TASKTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_TASKTRACKER_OPTS"
-# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-# export HADOOP_CLIENT_OPTS
-
-# Extra ssh options.  Empty by default.
-# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
-
-# File naming remote slave hosts.  $HADOOP_PREFIX/conf/slaves by default.
-export HADOOP_SLAVES=${HADOOP_CONF_DIR}/slaves
-
-# host:path where hadoop code should be rsync'd from.  Unset by default.
-# export HADOOP_MASTER=master:/home/$USER/src/hadoop
-
-# Seconds to sleep between slave commands.  Unset by default.  This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HADOOP_SLAVE_SLEEP=0.1
-
-# The directory where pid files are stored. $HADOOP_PREFIX/var/run by default.
-HADOOP_PID_DIR=${HADOOP_PID_DIR}
-export HADOOP_PID_DIR=${HADOOP_PID_DIR:-$HADOOP_PREFIX/var/run}
-
-# A string representing this instance of hadoop. $USER by default.
-export HADOOP_IDENT_STRING=`whoami`
-
-# The scheduling priority for daemon processes.  See 'man nice'.
-# export HADOOP_NICENESS=10
-
-# Allow Hadoop to run with sysctl net.ipv6.bindv6only = 1
-# export HADOOP_ALLOW_IPV6=yes
-
-# Where log files are stored.  $HADOOP_PREFIX/var/log by default.
-HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$HADOOP_IDENT_STRING
-export HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-$HADOOP_PREFIX/var/log}

+ 0 - 106
common/conf/hadoop-policy.xml.template

@@ -1,106 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-  <property>
-    <name>security.client.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for ClientProtocol, which is used by user code 
-    via the DistributedFileSystem. 
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.client.datanode.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for ClientDatanodeProtocol, the client-to-datanode protocol 
-    for block recovery.
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.datanode.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for DatanodeProtocol, which is used by datanodes to 
-    communicate with the namenode.
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.inter.datanode.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for InterDatanodeProtocol, the inter-datanode protocol
-    for updating generation timestamp.
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.namenode.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for NamenodeProtocol, the protocol used by the secondary
-    namenode to communicate with the namenode.
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.inter.tracker.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for InterTrackerProtocol, used by the tasktrackers to 
-    communicate with the jobtracker.
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.job.submission.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for JobSubmissionProtocol, used by job clients to 
-    communicate with the jobtracker for job submission, querying job status, etc.
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.task.umbilical.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for TaskUmbilicalProtocol, used by the map and reduce 
-    tasks to communicate with the parent tasktracker. 
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.refresh.policy.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for RefreshAuthorizationPolicyProtocol, used by the 
-    dfsadmin and mradmin commands to refresh the security policy in-effect. 
-    The ACL is a comma-separated list of user and group names. The user and 
-    group list is separated by a blank, e.g. "alice,bob users,wheel".
-    A special value of "*" means all users are allowed.</description>
-  </property>
-
-  <property>
-    <name>security.admin.operations.protocol.acl</name>
-    <value>*</value>
-    <description>ACL for AdminOperationsProtocol, used by the mradmins commands
-    to refresh queues and nodes at JobTracker. The ACL is a comma-separated list of 
-    user and group names. The user and group list is separated by a blank,
-    e.g. "alice,bob users,wheel". A special value of "*" means all users are
-    allowed.</description>
-  </property>
-</configuration>
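
Every ACL in the template ships wide open ("*"). Tightening one follows the format the descriptions spell out (users, then a blank, then groups); the names below are the illustrative ones from the descriptions themselves:

    <property>
      <name>security.client.protocol.acl</name>
      <value>alice,bob users,wheel</value>
      <description>Only alice, bob, and members of the users and wheel
      groups may talk to the namenode as HDFS clients.</description>
    </property>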

+ 0 - 1
common/conf/masters.template

@@ -1 +0,0 @@
-localhost

+ 0 - 2
common/conf/slaves.template

@@ -1,2 +0,0 @@
-# Specify multiple slaves, one per line.
-localhost

+ 0 - 331
common/ivy.xml

@@ -1,331 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<ivy-module version="1.0">
-  <info organisation="org.apache.hadoop" module="${ant.project.name}" revision="${version}">
-    <license name="Apache 2.0"/>
-    <ivyauthor name="Apache Hadoop Team" url="http://hadoop.apache.org"/>
-    <description>
-        Hadoop Common
-    </description>
-  </info>
-  <configurations defaultconfmapping="default">
-    <!--these match the Maven configurations-->
-    <conf name="default" extends="master,runtime"/>
-    <conf name="master" description="contains the artifact but no dependencies"/>
-    <conf name="runtime" description="runtime but not the artifact"
-      extends="client,server,s3-server,kfs,mandatory,jetty,ftp"/>
-
-    <conf name="mandatory" description="contains the critical  dependencies"
-      extends="commons-logging,log4j"/>
-
-    <!--
-    These public configurations contain the core dependencies for running hadoop client or server.
-    The server is effectively a superset of the client.
-    -->
-    <conf name="client" description="client-side dependencies"
-      extends="mandatory,httpclient"/>
-    <conf name="server" description="server-side dependencies"
-      extends="client"/>
-    <conf name="s3-client" description="dependencies for working with S3/EC2 infrastructure"
-      extends="client"/>
-    <conf name="s3-server" description="dependencies for running on S3/EC2 infrastructure"
-      extends="s3-client,server"/>
-    <conf name="kfs" description="dependencies for KFS file system support"/>
-    <conf name="ftp" description="dependencies for workign with FTP filesytems" 
-              extends="mandatory"/>
-   <conf name="jetty" description="Jetty provides the in-VM HTTP daemon" extends="commons-logging"/>
-
-   <conf name="common" extends="runtime,mandatory,httpclient,ftp,jetty,jdiff"
-		      description="common artifacts"/>
-    <!--Testing pulls in everything-->
-   <conf name="test" extends="master" description="the classpath needed to run tests"/>
-
-    <!--Packaging pulls in everything-->
-   <conf name="package" extends="master" description="the classpath needed for packaging"/>
-
-    <!--Private configurations. -->
-
-    <conf name="javadoc" visibility="private" description="artiracts required while performing doc generation"
-      extends="common,mandatory,jetty,lucene"/>
-
-    <conf name="releaseaudit" visibility="private"
-	description="Artifacts required for releaseaudit target"/>
-     
-    <conf name="commons-logging" visibility="private"/>
-    <conf name="httpclient" visibility="private" extends="commons-logging"/>
-    <conf name="log4j" visibility="private"/>
-    <conf name="lucene" visibility="private"/>
-    <conf name="jdiff" visibility="private" extends="log4j,s3-client,jetty,server"/>
-    <conf name="checkstyle" visibility="private"/>
-
-  </configurations>
-
-  <publications>
-    <!--get the artifact from our module name-->
-    <artifact conf="master"/>
-  </publications>
-  <dependencies>
-
- <!--used client side-->
-    <dependency org="commons-cli"
-      name="commons-cli"
-      rev="${commons-cli.version}"
-      conf="client->default"/>
-    <dependency org="checkstyle"
-      name="checkstyle"
-      rev="${checkstyle.version}"
-      conf="checkstyle->default"/>
-    <dependency org="jdiff"
-      name="jdiff"
-      rev="${jdiff.version}"
-      conf="jdiff->default"/>
-
-    <dependency org="xmlenc"
-      name="xmlenc"
-      rev="${xmlenc.version}"
-      conf="server->default"/>
-
-    <!--Configuration: httpclient-->
-
-    <!--
-    commons-httpclient pulls in too many transitive dependencies.
-    All it actually needs is the commons-codec and commons-logging JARs.
-    -->
-    <dependency org="commons-httpclient"
-      name="commons-httpclient"
-      rev="${commons-httpclient.version}"
-      conf="httpclient->master">
-    </dependency>
-
-    <dependency org="commons-codec"
-      name="commons-codec"
-      rev="${commons-codec.version}"
-      conf="httpclient->default"/>
-
-    <dependency org="commons-net"
-      name="commons-net"
-      rev="${commons-net.version}"
-      conf="ftp->default"/>
-
-    <!--Configuration: Jetty -->
-
-<!-- <dependency org="javax.servlet"
-      name="servlet-api"
-      rev="${servlet-api.version}"
-      conf="jetty->master"/>   -->
-    <dependency org="org.mortbay.jetty"
-      name="jetty"
-      rev="${jetty.version}"
-      conf="jetty->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="jetty-util"
-      rev="${jetty-util.version}"
-      conf="jetty->master"/>
-
-    <dependency org="tomcat"
-      name="jasper-runtime"
-      rev="${jasper.version}"
-      conf="jetty->master"/>
-    <dependency org="tomcat"
-      name="jasper-compiler"
-      rev="${jasper.version}"
-      conf="jetty->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="jsp-api-2.1"
-      rev="${jetty.version}"
-      conf="jetty->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="jsp-2.1"
-      rev="${jetty.version}"
-      conf="jetty->master"/>
-    <dependency org="commons-el"
-      name="commons-el"
-      rev="${commons-el.version}"
-      conf="jetty->master"/>
-
-
-    <!--Configuration: commons-logging -->
-
-    <!--it is essential that only the master JAR of commons logging
-    is pulled in, as its dependencies are usually a mess, including things
-    like out of date servlet APIs, bits of Avalon, etc.
-    -->
-    <dependency org="commons-logging"
-      name="commons-logging"
-      rev="${commons-logging.version}"
-      conf="commons-logging->master"/>
-
-
-    <!--Configuration: log4j -->
-
-    <!--log4j is not optional until commons-logging.properties is stripped out of the JAR -->
-    <dependency org="log4j"
-      name="log4j"
-      rev="${log4j.version}"
-      conf="log4j->master"/>
-
-    <!--Configuration: s3-client -->
-    <!--there are two jets3t projects in the repository; this one goes up to 0.6 and
-    is assumed to be the live one-->
-    <dependency org="net.java.dev.jets3t"
-      name="jets3t"
-      rev="${jets3t.version}"
-      conf="s3-client->master"/>
-    <dependency org="commons-net"
-      name="commons-net"
-      rev="${commons-net.version}"
-      conf="s3-client->master"/> 
-    <dependency org="org.mortbay.jetty"
-      name="servlet-api-2.5"
-      rev="${servlet-api-2.5.version}"
-      conf="s3-client->master"/>
-    <dependency org="net.sf.kosmosfs"
-      name="kfs"
-      rev="${kfs.version}"
-      conf="kfs->default"/>
-
-    <!--Configuration: test -->
-    <!--artifacts needed for testing -->
-
-    <dependency org="org.apache.ftpserver"
-      name="ftplet-api"
-      rev="${ftplet-api.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.mina"
-      name="mina-core"
-      rev="${mina-core.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.ftpserver"
-      name="ftpserver-core"
-      rev="${ftpserver-core.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.ftpserver"
-      name="ftpserver-deprecated"
-      rev="${ftpserver-deprecated.version}"
-      conf="test->default"/>
-
-    <dependency org="junit"
-      name="junit"
-      rev="${junit.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.rat"
-      name="apache-rat-tasks"
-      rev="${rats-lib.version}"
-      conf="releaseaudit->default"/>
-    <dependency org="commons-lang"
-      name="commons-lang"
-      rev="${commons-lang.version}"
-      conf="releaseaudit->default"/>
-    <dependency org="commons-collections"
-      name="commons-collections"
-      rev="${commons-collections.version}"
-      conf="releaseaudit->default"/>
-    <dependency org="hsqldb"
-      name="hsqldb"
-      rev="${hsqldb.version}"
-      conf="common->default"/>
-    <dependency org="org.apache.lucene"
-      name="lucene-core"
-      rev="${lucene-core.version}"
-      conf="javadoc->default"/> 
-    <dependency org="commons-logging"
-      name="commons-logging-api"
-      rev="${commons-logging-api.version}"
-      conf="common->default"/>
-    <dependency org="org.slf4j"
-      name="slf4j-api"
-      rev="${slf4j-api.version}"
-      conf="common->default"/>
-    <dependency org="org.eclipse.jdt"
-      name="core"
-      rev="${core.version}"
-      conf="common->master"/>
-    <dependency org="oro"
-      name="oro"
-      rev="${oro.version}"
-      conf="common->default"/>
-    <dependency org="org.slf4j"
-      name="slf4j-log4j12"
-      rev="${slf4j-log4j12.version}"
-      conf="common->master">
-    </dependency>
-    <dependency org="org.apache.hadoop"
-      name="avro"
-      rev="${avro.version}"
-      conf="common->default">
-      <exclude module="ant"/>
-      <exclude module="jetty"/>
-      <exclude module="slf4j-simple"/>
-    </dependency>
-    <dependency org="org.codehaus.jackson"
-      name="jackson-mapper-asl"
-      rev="${jackson.version}"
-      conf="common->default"/> 
-    <dependency org="com.thoughtworks.paranamer"
-      name="paranamer"
-      rev="${paranamer.version}"
-      conf="common->default"/>
-    <dependency org="com.thoughtworks.paranamer"
-      name="paranamer-ant"
-      rev="${paranamer.version}"
-      conf="common->default"/>
-    <dependency org="org.aspectj"
-      name="aspectjrt"
-      rev="${aspectj.version}"
-      conf="common->default">
-    </dependency>
-    <dependency org="org.aspectj"
-      name="aspectjtools"
-      rev="${aspectj.version}"
-      conf="common->default">
-    </dependency>
-    <dependency org="org.mockito" 
-      name="mockito-all" 
-      rev="${mockito-all.version}" 
-      conf="test->default">
-    </dependency> 
-    <dependency org="com.jcraft"
-      name="jsch"
-      rev="${jsch.version}"
-      conf="common->default">
-    </dependency>
-    <!--Configuration: package -->
-    <!--artifacts needed for packaging -->
-    <dependency org="org.vafer" 
-      name="jdeb" 
-      rev="${jdeb.version}"
-      conf="package->master">
-    </dependency>
-    <dependency org="commons-configuration"
-      name="commons-configuration"
-      rev="${commons-configuration.version}"
-      conf="common->default"/>
-    <dependency org="org.apache.commons"
-      name="commons-math"
-      rev="${commons-math.version}"
-      conf="common->default"/>
-    <dependency org="com.google.guava"
-      name="guava"
-      rev="${guava.version}"
-      conf="common->default"/>
-    <dependency org="com.google.protobuf"
-      name="protobuf-java"
-      rev="${protobuf.version}"
-      conf="common->default"/>
-  </dependencies>
-</ivy-module>
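
The Ivy confs in this file are the one piece with no literal Maven counterpart: a mapping such as conf="test->default" becomes a dependency scope, and the <exclude> elements under the avro dependency become <exclusions> blocks, as the generated POM templates below already show. As a sketch of the conf-to-scope translation, the junit entry maps onto:

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.8.1</version>
      <scope>test</scope>  <!-- the Ivy "test" conf, expressed as a Maven scope -->
    </dependency>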

+ 0 - 139
common/ivy/hadoop-common-instrumented-template.xml

@@ -1,139 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hadoop</groupId>
-  <artifactId>hadoop-common-instrumented</artifactId>
-  <packaging>jar</packaging>
-  <version>@version</version>
-  <dependencies>
-   <dependency>
-      <groupId>commons-cli</groupId>
-      <artifactId>commons-cli</artifactId>
-      <version>1.2</version>
-    </dependency>
-   <dependency>
-      <groupId>xmlenc</groupId>
-      <artifactId>xmlenc</artifactId>
-      <version>0.52</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>3.1</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-codec</groupId>
-      <artifactId>commons-codec</artifactId>
-      <version>1.4</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-net</groupId>
-      <artifactId>commons-net</artifactId>
-      <version>1.4.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jetty</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jetty-util</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-runtime</artifactId>
-      <version>5.5.12</version>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-compiler</artifactId>
-      <version>5.5.12</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jsp-api-2.1</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jsp-2.1</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-el</groupId>
-      <artifactId>commons-el</artifactId>
-      <version>1.0</version>
-    </dependency>
-    <dependency>
-      <groupId>net.java.dev.jets3t</groupId>
-      <artifactId>jets3t</artifactId>
-      <version>0.7.1</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-net</groupId>
-      <artifactId>commons-net</artifactId>
-      <version>1.4.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>servlet-api-2.5</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>net.sf.kosmosfs</groupId>
-      <artifactId>kfs</artifactId>
-      <version>0.3</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.1</version>
-    </dependency>
-    <dependency>
-      <groupId>hsqldb</groupId>
-      <artifactId>hsqldb</artifactId>
-      <version>1.8.0.10</version>
-    </dependency>
-    <dependency>
-      <groupId>oro</groupId>
-      <artifactId>oro</artifactId>
-      <version>2.0.8</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>avro</artifactId>
-      <version>1.3.2</version>
-      <exclusions>
-        <exclusion>
-          <!-- Don't pull in Avro's (later) version of Jetty.-->
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jetty</artifactId>
-        </exclusion>
-        <exclusion>
-          <!-- Exclude Avro's version of ant since it conflicts with Jetty's.-->
-          <groupId>org.apache.ant</groupId>
-          <artifactId>ant</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-  </dependencies>
-</project>

+ 0 - 159
common/ivy/hadoop-common-template.xml

@@ -1,159 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hadoop</groupId>
-  <artifactId>hadoop-common</artifactId>
-  <packaging>jar</packaging>
-  <version>@version</version>
-  <dependencies>
-   <dependency>
-      <groupId>commons-cli</groupId>
-      <artifactId>commons-cli</artifactId>
-      <version>1.2</version>
-    </dependency>
-   <dependency>
-      <groupId>xmlenc</groupId>
-      <artifactId>xmlenc</artifactId>
-      <version>0.52</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>3.1</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-codec</groupId>
-      <artifactId>commons-codec</artifactId>
-      <version>1.4</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-net</groupId>
-      <artifactId>commons-net</artifactId>
-      <version>1.4.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jetty</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jetty-util</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-runtime</artifactId>
-      <version>5.5.12</version>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-compiler</artifactId>
-      <version>5.5.12</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jsp-api-2.1</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jsp-2.1</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-el</groupId>
-      <artifactId>commons-el</artifactId>
-      <version>1.0</version>
-    </dependency>
-    <dependency>
-      <groupId>net.java.dev.jets3t</groupId>
-      <artifactId>jets3t</artifactId>
-      <version>0.7.1</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-net</groupId>
-      <artifactId>commons-net</artifactId>
-      <version>1.4.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>servlet-api-2.5</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>net.sf.kosmosfs</groupId>
-      <artifactId>kfs</artifactId>
-      <version>0.3</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.1</version>
-    </dependency>
-    <dependency>
-      <groupId>hsqldb</groupId>
-      <artifactId>hsqldb</artifactId>
-      <version>1.8.0.10</version>
-    </dependency>
-    <dependency>
-      <groupId>oro</groupId>
-      <artifactId>oro</artifactId>
-      <version>2.0.8</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>avro</artifactId>
-      <version>1.3.2</version>
-      <exclusions>
-        <exclusion>
-          <!-- Don't pull in Avro's (later) version of Jetty.-->
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jetty</artifactId>
-        </exclusion>
-        <exclusion>
-          <!-- Exclude Avro's version of ant since it conflicts with Jetty's.-->
-          <groupId>org.apache.ant</groupId>
-          <artifactId>ant</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>commons-configuration</groupId>
-      <artifactId>commons-configuration</artifactId>
-      <version>1.6</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-math</artifactId>
-      <version>2.1</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>r09</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
-    </dependency>
-  </dependencies>
-</project>

+ 0 - 58
common/ivy/hadoop-common-test-template.xml

@@ -1,58 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hadoop</groupId>
-  <artifactId>hadoop-common-test</artifactId>
-  <packaging>jar</packaging>
-  <version>@version</version>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>@version</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ftpserver</groupId>
-      <artifactId>ftplet-api</artifactId>
-      <version>1.0.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.mina</groupId>
-      <artifactId>mina-core</artifactId>
-      <version>2.0.0-M5</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ftpserver</groupId>
-      <artifactId>ftpserver-core</artifactId>
-      <version>1.0.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ftpserver</groupId>
-      <artifactId>ftpserver-deprecated</artifactId>
-      <version>1.0.0-M2</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
-    </dependency>
-  </dependencies>
-</project>

+ 0 - 50
common/ivy/ivysettings.xml

@@ -1,50 +0,0 @@
-<ivysettings>
- <!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-  <property name="repo.maven.org" value="http://repo1.maven.org/maven2/" override="false"/>
-
-  <property name="maven2.pattern" value="[organisation]/[module]/[revision]/[module]-[revision]"/>
-  <property name="repo.dir" value="${user.home}/.m2/repository"/>
-      <!-- pull in the local repository -->
- <include url="${ivy.default.conf.dir}/ivyconf-local.xml"/> 
-
- <property name="resolvers" value="default" override="false"/>
-  <property name="force-resolve" value="false" override="false"/>
- <settings defaultResolver="${resolvers}"/>
-
- <resolvers>
-   <!--ibiblio resolvers-->
-    <ibiblio name="maven2" root="${repo.maven.org}" m2compatible="true"/>
-
-    <filesystem name="fs" m2compatible="true" force="${force-resolve}">
-       <artifact pattern="${repo.dir}/${maven2.pattern}.[ext]"/>
-       <ivy pattern="${repo.dir}/${maven2.pattern}.pom"/>
-    </filesystem>
-
-    <chain name="default" dual="true">
-      <resolver ref="maven2"/>
-    </chain>
-
-    <chain name="internal" dual="true">
-      <resolver ref="fs"/>
-      <resolver ref="maven2"/>
-    </chain>
-
-  </resolvers>
-
-</ivysettings>
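
The resolver chain removed here (local ~/.m2 filesystem first, then Maven
Central) mirrors what Maven already does out of the box, so the new build needs
no replacement settings file. A sketch, assuming a populated local cache, of
forcing resolution against ~/.m2 only:

    # offline mode: resolve artifacts from the local ~/.m2 repository only
    $ mvn -o compile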

+ 0 - 90
common/ivy/libraries.properties

@@ -1,90 +0,0 @@
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-#This properties file lists the versions of the various artifacts used by hadoop and components.
-#It drives ivy and the generation of a maven POM
-
-#These are the versions of our dependencies (in alphabetical order)
-ant-task.version=2.0.10
-
-avro.version=1.3.2
-
-checkstyle.version=4.2
-
-commons-cli.version=1.2
-commons-cli2.version=2.0-mahout
-commons-codec.version=1.4
-commons-collections.version=3.1
-commons-configuration.version=1.6
-commons-httpclient.version=3.1
-commons-lang.version=2.5
-commons-logging.version=1.1.1
-commons-logging-api.version=1.1
-commons-el.version=1.0
-commons-fileupload.version=1.2
-commons-io.version=1.4
-commons-math.version=2.1
-commons-net.version=1.4.1
-core.version=3.1.1
-coreplugin.version=1.3.2
-
-ftplet-api.version=1.0.0
-ftpserver-core.version=1.0.0
-ftpserver-deprecated.version=1.0.0-M2
-
-guava.version=r09
-
-hsqldb.version=1.8.0.10
-
-ivy.version=2.1.0
-
-jasper.version=5.5.12
-jdeb.version=0.8
-jsp.version=2.1
-jsp-api.version=5.5.12
-jets3t.version=0.7.1
-jetty.version=6.1.14
-jetty-util.version=6.1.14
-junit.version=4.8.1
-jdiff.version=1.0.9
-json.version=1.0
-
-kfs.version=0.3
-
-log4j.version=1.2.15
-lucene-core.version=2.3.1
-
-mina-core.version=2.0.0-M5
-
-oro.version=2.0.8
-
-protobuf.version=2.4.0a
-
-rats-lib.version=0.6
-
-servlet.version=4.0.6
-servlet-api-2.5.version=6.1.14
-servlet-api.version=2.5
-slf4j-api.version=1.5.11
-slf4j-log4j12.version=1.5.11
-
-wagon-http.version=1.0-beta-2
-
-xmlenc.version=0.52
-xerces.version=1.4.4
-
-aspectj.version=1.6.5
-
-mockito-all.version=1.8.5
-
-jsch.version=0.1.42
-
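
The version pins deleted above move into the dependencyManagement section of
the new hadoop-project parent POM (see BUILDING.txt below: all plugin and
dependency versions are defined there). A sketch, assuming a buildable module,
for inspecting the versions a module actually resolves:

    # print the resolved dependency graph, including managed versions
    $ mvn dependency:tree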

+ 0 - 86
common/src/fixFontsPath.sh

@@ -1,86 +0,0 @@
-#!/bin/sh
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# This file is used to fix the paths in CNDOCS_SRC/uming.conf, CNDOCS_SRC/src/documentation/sitemap.xmap 
-
-CNDOCS_SRC=$1
-
-cat <<EOF > src/docs/cn/uming.conf
-<?xml version="1.0"?>
-<configuration>
-  <fonts>
-    <font metrics-file="$CNDOCS_SRC/uming.xml" kerning="yes" embed-file="$CNDOCS_SRC/uming.ttc">
-      <font-triplet name="AR PL UMing" style="normal" weight="normal"/>
-      <font-triplet name="AR PL UMing" style="italic" weight="normal"/>
-      <font-triplet name="AR PL UMing" style="normal" weight="bold"/>
-      <font-triplet name="AR PL UMing" style="italic" weight="bold"/>
-    </font>
-  </fonts>
-</configuration>
-EOF
-
-cat <<EOF > src/docs/cn/src/documentation/sitemap.xmap
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<map:sitemap xmlns:map="http://apache.org/cocoon/sitemap/1.0">
-  <map:components>
-    <map:serializers default="fo2pdf">
-      <map:serializer name="fo2pdf"
-                src="org.apache.cocoon.serialization.FOPSerializer"
-                mime-type="application/pdf">
-        <user-config src="$CNDOCS_SRC/uming.conf"/>
-        </map:serializer>
-    </map:serializers>
-  </map:components>
-  <map:pipelines>
-    <map:pipeline>
-<!-- generate .pdf files from .fo -->
-      <map:match type="regexp" pattern="^(.*?)([^/]*).pdf$">
-        <map:select type="exists">
-          <map:when test="{lm:project.{1}{2}.pdf}">
-            <map:read src="{lm:project.{1}{2}.pdf}"/>
-          </map:when>
-          <map:when test="{lm:project.{1}{2}.fo}">
-            <map:generate src="{lm:project.{1}{2}.fo}"/>
-            <map:serialize type="fo2pdf"/>
-          </map:when>
-          <map:otherwise>
-            <map:generate src="cocoon://{1}{2}.fo"/>
-            <map:serialize type="fo2pdf"/>
-          </map:otherwise>
-        </map:select>
-      </map:match>
-    </map:pipeline>
-  </map:pipelines>
-</map:sitemap>
-EOF

+ 0 - 78
common/src/native/packageNativeHadoop.sh

@@ -1,78 +0,0 @@
-#!/bin/sh
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# packageNativeHadoop.sh - A simple script to help package native-hadoop libraries
-
-#
-# Note: 
-# This script relies on the following environment variables to function correctly:
-#  * BASE_NATIVE_LIB_DIR
-#  * BUILD_NATIVE_DIR
-#  * DIST_LIB_DIR
-# All these are setup by build.xml.
-#
-
-TAR='tar cf -'
-UNTAR='tar xfBp -'
-
-# Copy the pre-built libraries in $BASE_NATIVE_LIB_DIR
-if [ -d $BASE_NATIVE_LIB_DIR ]
-then
-  for platform in `ls $BASE_NATIVE_LIB_DIR`
-  do
-    if [ ! -d $DIST_LIB_DIR ]
-    then
-      mkdir -p $DIST_LIB_DIR
-      echo "Created $DIST_LIB_DIR"
-    fi
-    echo "Copying libraries in $BASE_NATIVE_LIB_DIR/$platform to $DIST_LIB_DIR/"
-    cd $BASE_NATIVE_LIB_DIR/
-    $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
-  done
-fi
-
-# Copy the custom-built libraries in $BUILD_DIR
-if [ -d $BUILD_NATIVE_DIR ]
-then 
-  for platform in `ls $BUILD_NATIVE_DIR`
-  do
-    if [ ! -d $DIST_LIB_DIR ]
-    then
-      mkdir -p $DIST_LIB_DIR
-      echo "Created $DIST_LIB_DIR"
-    fi
-    echo "Copying libraries in $BUILD_NATIVE_DIR/$platform/lib to $DIST_LIB_DIR/"
-    cd $BUILD_NATIVE_DIR/$platform/lib
-    $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
-  done  
-fi
-
-if [ "${BUNDLE_SNAPPY_LIB}" = "true" ]
-then
- if [ -d ${SNAPPY_LIB_DIR} ]
- then
-   echo "Copying Snappy library in ${SNAPPY_LIB_DIR} to $DIST_LIB_DIR/"
-   cd ${SNAPPY_LIB_DIR}
-   $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
- else
-   echo "Snappy lib directory ${SNAPPY_LIB_DIR} does not exist"
-   exit 1
- fi
-fi
-
-#vim: ts=2: sw=2: et

+ 0 - 11
common/src/packages/templates/conf/core-site.xml

@@ -1,11 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-  <property>
-    <name>fs.default.name</name>
-    <value>${HADOOP_NN_HOST}</value>
-  </property>
-</configuration>

+ 0 - 0
common/src/test/bin/smart-apply-patch.sh → dev-support/smart-apply-patch.sh


+ 3 - 33
common/.gitignore → dev-support/test-patch.properties

@@ -13,36 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-*~
-.classpath
-.project
-.settings
-*.iml
-*.ipr
-*.iws
-.idea
-.svn
-build/
-build-fi/
-build.properties
-conf/masters
-conf/slaves
-conf/hadoop-env.sh
-conf/hadoop-site.xml
-conf/core-site.xml
-conf/mapred-site.xml
-conf/hdfs-site.xml
-conf/hadoop-policy.xml
-conf/capacity-scheduler.xml
-conf/mapred-queue-acls.xml
-docs/api/
-ivy/hadoop-core.xml
-ivy/hadoop-core-test.xml
-ivy/ivy-*.jar
-ivy/maven-ant-tasks-*.jar
-logs/
-src/contrib/ec2/bin/hadoop-ec2-env.sh
-src/docs/build
-src/docs/cn/build
-src/docs/cn/src/documentation/sitemap.xmap
-src/docs/cn/uming.conf
+OK_RELEASEAUDIT_WARNINGS=0
+OK_FINDBUGS_WARNINGS=0
+OK_JAVADOC_WARNINGS=0
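
These OK_* values are the warning baselines that the relocated test-patch.sh
sources at startup (". $bindir/test-patch.properties" below) and compares fresh
warning counts against. A sketch of that comparison, using a numeric test
rather than the script's string comparison:

    # fail the findbugs check when the patched build exceeds the baseline
    if [[ $findbugsWarnings -gt $OK_FINDBUGS_WARNINGS ]] ; then
      echo "-1 findbugs: $findbugsWarnings warnings exceed baseline $OK_FINDBUGS_WARNINGS"
    fi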

+ 43 - 29
common/src/test/bin/test-patch.sh → dev-support/test-patch.sh

@@ -19,7 +19,7 @@ ulimit -n 1024
 ### SVN_REVISION and BUILD_URL are set by Hudson if it is run by patch process
 ### Read variables from properties file
 bindir=$(dirname $0)
-. $bindir/../test-patch.properties
+. $bindir/test-patch.properties
 
 ###############################################################################
 parseArgs() {
@@ -118,6 +118,7 @@ checkout () {
       echo "$status"
       cleanupAndExit 1
     fi
+    echo
   else   
     cd $BASEDIR
     $SVN revert -R .
@@ -178,8 +179,9 @@ setup () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant  -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
- $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+#  echo "$ANT_HOME/bin/ant  -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
+# $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     echo "Trunk compilation is broken?"
     cleanupAndExit 1
@@ -296,8 +298,11 @@ checkJavadocWarnings () {
   echo ""
   echo ""
   echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
-  javadocWarnings=`$GREP -o '\[javadoc\] [0-9]* warning' $PATCH_DIR/patchJavadocWarnings.txt | awk '{total += $2} END {print total}'`
+  (cd root; mvn install)
+  (cd doclet; mvn install)
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
+  $MAVEN_HOME/bin/mvn clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
+  javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | awk '/Javadoc Warnings/,EOF' | $GREP -v 'Javadoc Warnings' | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
   echo ""
   echo ""
   echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
@@ -327,8 +332,9 @@ checkJavacWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
@@ -337,8 +343,8 @@ checkJavacWarnings () {
   fi
   ### Compare trunk and patch javac warning numbers
   if [[ -f $PATCH_DIR/patchJavacWarnings.txt ]] ; then
-    trunkJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/trunkJavacWarnings.txt | awk '{total += $2} END {print total}'`
-    patchJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/patchJavacWarnings.txt | awk '{total += $2} END {print total}'`
+    trunkJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/trunkJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
+    patchJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
     echo "There appear to be $trunkJavacWarnings javac compiler warnings before the patch and $patchJavacWarnings javac compiler warnings after applying the patch."
     if [[ $patchJavacWarnings != "" && $trunkJavacWarnings != "" ]] ; then
       if [[ $patchJavacWarnings -gt $trunkJavacWarnings ]] ; then
@@ -367,8 +373,10 @@ checkReleaseAuditWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
+  $MAVEN_HOME/bin/mvn apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
+  find . -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt
 
   ### Compare trunk and patch release audit warning numbers
   if [[ -f $PATCH_DIR/patchReleaseAuditWarnings.txt ]] ; then
@@ -410,8 +418,10 @@ checkStyle () {
   echo "THIS IS NOT IMPLEMENTED YET"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
+  $MAVEN_HOME/bin/mvn compile checkstyle:checkstyle -D${PROJECT_NAME}PatchProcess
+
   JIRA_COMMENT_FOOTER="Checkstyle results: $BUILD_URL/artifact/trunk/build/test/checkstyle-errors.html
 $JIRA_COMMENT_FOOTER"
   ### TODO: calculate actual patchStyleErrors
@@ -441,27 +451,28 @@ checkFindbugsWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=$FINDBUGS_HOME -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=$FINDBUGS_HOME -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
+  $MAVEN_HOME/bin/mvn clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess -X
+
   if [ $? != 0 ] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
     -1 findbugs.  The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
     return 1
   fi
-JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/build/test/findbugs/newPatchFindbugsWarnings.html
+JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/target/newPatchFindbugsWarnings.html
 $JIRA_COMMENT_FOOTER"
-  cp $BASEDIR/build/test/findbugs/*.xml $PATCH_DIR/patchFindbugsWarnings.xml
+  
+  cp $BASEDIR/hadoop-common/target/findbugsXml.xml $PATCH_DIR/patchFindbugsWarnings.xml
   $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
     $PATCH_DIR/patchFindbugsWarnings.xml \
     $PATCH_DIR/patchFindbugsWarnings.xml
   findbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings.xml \
-    $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
+    $PATCH_DIR/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
   $FINDBUGS_HOME/bin/convertXmlToText -html \
-    $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml \
-    $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html
-  cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html $PATCH_DIR/newPatchFindbugsWarnings.html
-  cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml $PATCH_DIR/newPatchFindbugsWarnings.xml
+    $PATCH_DIR/newPatchFindbugsWarnings.xml \
+    $PATCH_DIR/newPatchFindbugsWarnings.html
 
   ### if current warnings greater than OK_FINDBUGS_WARNINGS
   if [[ $findbugsWarnings > $OK_FINDBUGS_WARNINGS ]] ; then
@@ -496,11 +507,12 @@ runCoreTests () {
      PreTestTarget="create-c++-configure"
   fi
 
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME  $PreTestTarget test-core
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME  $PreTestTarget test-core
+  $MAVEN_HOME/bin/mvn clean test -Pnative -DHadoopPatchProcess
   if [[ $? != 0 ]] ; then
     ### Find and format names of failed tests
-    failed_tests=`grep -l -E "<failure|<error" $WORKSPACE/trunk/build/test/*.xml | sed -e "s|.*build/test/TEST-|                  |g" | sed -e "s|\.xml||g"`
+    failed_tests=`grep -l -E "<failure|<error" $WORKSPACE/trunk/target/hadoop-common/surefire-reports/*.xml | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
     JIRA_COMMENT="$JIRA_COMMENT
 
     -1 core tests.  The patch failed these core unit tests:
@@ -534,8 +546,9 @@ runContribTests () {
   ### Kill any rogue build processes from the last attempt
   $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
 
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib
+  echo "NOP"
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
@@ -564,8 +577,9 @@ checkInjectSystemFaults () {
   ### Kill any rogue build processes from the last attempt
   $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
 
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
+  echo "NOP"
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 

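Taken together, the edits above replace each Ant target with a Maven
invocation. A condensed map of the new commands (a sketch; $MAVEN_HOME and
$PROJECT_NAME are set by the script's environment, exactly as used above):

    # compile + javac warnings   (was: ant clean tar)
    $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch
    # javadoc warnings           (was: ant clean javadoc)
    $MAVEN_HOME/bin/mvn clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess
    # release audit              (was: ant releaseaudit)
    $MAVEN_HOME/bin/mvn apache-rat:check -D${PROJECT_NAME}PatchProcess
    # findbugs                   (was: ant findbugs)
    $MAVEN_HOME/bin/mvn clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess
    # core tests                 (was: ant test-core)
    $MAVEN_HOME/bin/mvn clean test -Pnative -DHadoopPatchProcess
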
+ 38 - 0
hadoop-annotations/pom.xml

@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>0.23.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-annotations</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <description>Apache Hadoop Annotations</description>
+  <name>Apache Hadoop Annotations</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>jdiff</groupId>
+      <artifactId>jdiff</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+</project>
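
Because hadoop-common's POM (below) consumes hadoop-annotations with scope
"provided", building a single module from a fresh checkout presumably requires
installing this one first, per the note in BUILDING.txt about modules not part
of the build run:

    # make the annotations JAR available in the local Maven cache
    $ cd hadoop-annotations && mvn install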

+ 0 - 0
common/src/java/org/apache/hadoop/classification/InterfaceAudience.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java


+ 0 - 0
common/src/java/org/apache/hadoop/classification/InterfaceStability.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceStability.java


+ 0 - 0
common/src/java/org/apache/hadoop/classification/tools/ExcludePrivateAnnotationsJDiffDoclet.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ExcludePrivateAnnotationsJDiffDoclet.java


+ 0 - 0
common/src/java/org/apache/hadoop/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java


+ 0 - 0
common/src/java/org/apache/hadoop/classification/tools/RootDocProcessor.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java


+ 0 - 0
common/src/java/org/apache/hadoop/classification/tools/StabilityOptions.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java


+ 0 - 0
common/src/java/org/apache/hadoop/classification/tools/package-info.java → hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/package-info.java


+ 99 - 0
hadoop-assemblies/pom.xml

@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-assemblies</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <name>Apache Hadoop Assemblies</name>
+  <description>Apache Hadoop Assemblies</description>
+
+  <properties>
+    <failIfNoTests>false</failIfNoTests>
+  </properties>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <version>1.0</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-assembly-plugin</artifactId>
+          <version>2.2-beta-3</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.rat</groupId>
+          <artifactId>apache-rat-plugin</artifactId>
+          <version>0.7</version>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <inherited>false</inherited>
+        <configuration>
+          <rules>
+            <requireMavenVersion>
+              <version>[3.0.0,)</version>
+            </requireMavenVersion>
+            <requireJavaVersion>
+              <version>1.6</version>
+            </requireJavaVersion>
+            <requireOS>
+              <family>unix</family>
+            </requireOS>
+          </rules>
+        </configuration>
+        <executions>
+          <execution>
+            <id>clean</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>pre-clean</phase>
+          </execution>
+          <execution>
+            <id>default</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>validate</phase>
+          </execution>
+          <execution>
+            <id>site</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>pre-site</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+</project>

+ 113 - 0
hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml

@@ -0,0 +1,113 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly>
+  <id>hadoop-bintar</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/bin</outputDirectory>
+      <includes>
+        <include>hadoop</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/conf</directory>
+      <outputDirectory>/etc/hadoop</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/libexec</outputDirectory>
+      <includes>
+        <include>hadoop-config.sh</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/sbin</outputDirectory>
+      <includes>
+        <include>*.sh</include>
+      </includes>
+      <excludes>
+        <exclude>hadoop-config.sh</exclude>
+      </excludes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/packages</directory>
+      <outputDirectory>/sbin</outputDirectory>
+      <includes>
+        <include>*.sh</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}</directory>
+      <outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
+      <includes>
+        <include>*.txt</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/webapps</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/conf</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/templates</outputDirectory>
+      <includes>
+        <include>*-site.xml</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+        <include>${project.artifactId}-${project.version}-tests.jar</include>
+        <include>${project.artifactId}-${project.version}-sources.jar</include>
+        <include>${project.artifactId}-${project.version}-test-sources.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/dev-support/jdiff</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/site/jdiff/xml</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
+    </fileSet>
+  </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
+      <unpack>false</unpack>
+      <scope>compile</scope>
+      <useProjectArtifact>false</useProjectArtifact>
+      <excludes>
+        <exclude>org.apache.ant:*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>jdiff:jdiff:jar</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
+</assembly>

+ 37 - 0
hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml

@@ -0,0 +1,37 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <id>hadoop-src</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}</directory>
+      <outputDirectory>src/</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.log</exclude>
+        <exclude>**/build/**</exclude>
+        <exclude>**/target/**</exclude>
+      </excludes>
+    </fileSet>
+  </fileSets>
+</assembly>

+ 85 - 0
hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml

@@ -0,0 +1,85 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly>
+  <id>hadoop-tar</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${basedir}</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>*.txt</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/bin</outputDirectory>
+      <includes>
+        <include>*</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/libexec</outputDirectory>
+      <includes>
+        <include>hadoop-config.sh</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/conf</directory>
+      <outputDirectory>/conf</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/webapps</directory>
+      <outputDirectory>/webapps</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/site</directory>
+      <outputDirectory>/docs</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+        <include>${project.artifactId}-${project.version}-tests.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/src</directory>
+      <outputDirectory>/src</outputDirectory>
+    </fileSet>
+  </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <outputDirectory>/lib</outputDirectory>
+      <unpack>false</unpack>
+      <scope>compile</scope>
+      <useProjectArtifact>false</useProjectArtifact>
+      <excludes>
+        <exclude>org.apache.ant:*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>jdiff:jdiff:jar</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
+</assembly>
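
The three descriptors above (hadoop-bintar, hadoop-src, hadoop-tar) are inputs
for the maven-assembly-plugin; the wiring of each -P profile to a descriptor is
assumed to live in the shared hadoop-project POM. A sketch of producing the
layouts from hadoop-common:

    $ mvn package -Ptar -DskipTests      # assemble the hadoop-tar layout under target/
    $ mvn package -Pbintar -DskipTests   # assemble the FHS-style hadoop-bintar layout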

+ 61 - 0
hadoop-common/BUILDING.txt

@@ -0,0 +1,61 @@
+----------------------------------------------------------------------------------
+Requirements:
+
+* Unix System
+* JDK 1.6
+* Maven 3.0
+* Forrest 0.8 (if generating docs)
+* Findbugs 1.3.9 (if running findbugs)
+* Autotools (if compiling native code)
+* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+
+----------------------------------------------------------------------------------
+Maven modules:
+
+  hadoop                      (Main Hadoop project)
+         - hadoop-project     (Parent POM for all Hadoop Maven modules.             )
+                              (All plugins & dependencies versions are defined here.)
+         - hadoop-annotations (Generates the Hadoop doclet used to generate the Javadocs)
+         - hadoop-common      (Hadoop common)
+
+----------------------------------------------------------------------------------
+Where to run Maven from?
+
+  Maven can be run from any module. The only catch is that, when it is not run
+  from trunk, every module that is not part of the build must already be
+  installed in the local Maven cache or be available from a Maven repository.
+
+----------------------------------------------------------------------------------
+Maven build goals:
+
+ * Clean                     : mvn clean
+ * Compile                   : mvn compile [-Pnative]
+ * Run tests                 : mvn test [-Pnative]
+ * Create JAR                : mvn package
+ * Run findbugs              : mvn compile findbugs:findbugs
+ * Run checkstyle            : mvn compile checkstyle:checkstyle
+ * Install JAR in M2 cache   : mvn install
+ * Deploy JAR to Maven repo  : mvn deploy
+ * Run clover                : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
+ * Run Rat                   : mvn apache-rat:check
+ * Build javadocs            : mvn javadoc:javadoc
+ * Build TAR                 : mvn package [-Ptar][-Pbintar][-Pdocs][-Psrc][-Pnative]
+
+ Build options:
+
+  * Use -Pnative to compile/bundle native code
+  * Use -Dsnappy.prefix=(/usr/local) and -Dbundle.snappy=(false) to compile
+    the Snappy JNI bindings and to bundle the Snappy shared (.so) libraries
+  * Use -Pdocs to generate & bundle the documentation in the TAR (using -Ptar)
+  * Use -Psrc to bundle the source in the TAR (using -Ptar)
+
+ Test options:
+
+  * Use -DskipTests to skip tests when running the following Maven goals:
+    'package', 'install', 'deploy' or 'verify'
+  * -Dtest=<TESTCLASSNAME>,....
+  * -Dtest.exclude=<TESTCLASSNAME>
+  * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
+
+
+----------------------------------------------------------------------------------
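
As a concrete illustration of the goals above, a first build from a fresh
checkout might look like this (a sketch; the flags are only those documented
above):

    $ mvn install -DskipTests                                # compile and install JARs in the local cache
    $ mvn test -Dtest=<TESTCLASSNAME>                        # run a single test class
    $ mvn package -Ptar -Pdocs -Psrc -Pnative -DskipTests    # full TAR with docs, source and native code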

+ 3 - 0
common/CHANGES.txt → hadoop-common/CHANGES.txt

@@ -300,6 +300,9 @@ Trunk (unreleased changes)
     HADOOP-7178. Add a parameter, useRawLocalFileSystem, to copyToLocalFile(..)
     in FileSystem.  (Uma Maheswara Rao G via szetszwo)
 
+    HADOOP-6671. Use maven for hadoop common builds. (Alejandro Abdelnur
+    via tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole

+ 0 - 0
common/LICENSE.txt → hadoop-common/LICENSE.txt


+ 0 - 0
common/NOTICE.txt → hadoop-common/NOTICE.txt


+ 0 - 0
common/README.txt → hadoop-common/README.txt


+ 3 - 3
common/src/test/checkstyle.xml → hadoop-common/dev-support/checkstyle.xml

@@ -51,7 +51,7 @@
 
     <!-- Checks that a package.html file exists for each package.     -->
     <!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
-    <module name="PackageHtml"/>
+    <module name="JavadocPackage"/>
 
     <!-- Checks whether files end with a new line.                        -->
     <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
@@ -61,6 +61,8 @@
     <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
     <module name="Translation"/>
 
+    <module name="FileLength"/>
+    <module name="FileTabCharacter"/>
 
     <module name="TreeWalker">
 
@@ -112,7 +114,6 @@
 
         <!-- Checks for Size Violations.                    -->
         <!-- See http://checkstyle.sf.net/config_sizes.html -->
-        <module name="FileLength"/>
         <module name="LineLength"/>
         <module name="MethodLength"/>
         <module name="ParameterNumber"/>
@@ -126,7 +127,6 @@
         <module name="NoWhitespaceBefore"/>
         <module name="ParenPad"/>
         <module name="TypecastParenPad"/>
-        <module name="TabCharacter"/>
         <module name="WhitespaceAfter">
 	    	<property name="tokens" value="COMMA, SEMI"/>
 		</module>

+ 0 - 0
common/src/test/findbugsExcludeFile.xml → hadoop-common/dev-support/findbugsExcludeFile.xml


+ 0 - 0
common/lib/jdiff/hadoop-core_0.20.0.xml → hadoop-common/dev-support/jdiff/hadoop-core_0.20.0.xml


+ 0 - 0
common/lib/jdiff/hadoop-core_0.21.0.xml → hadoop-common/dev-support/jdiff/hadoop-core_0.21.0.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.17.0.xml → hadoop-common/dev-support/jdiff/hadoop_0.17.0.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.18.1.xml → hadoop-common/dev-support/jdiff/hadoop_0.18.1.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.18.2.xml → hadoop-common/dev-support/jdiff/hadoop_0.18.2.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.18.3.xml → hadoop-common/dev-support/jdiff/hadoop_0.18.3.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.19.0.xml → hadoop-common/dev-support/jdiff/hadoop_0.19.0.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.19.1.xml → hadoop-common/dev-support/jdiff/hadoop_0.19.1.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.19.2.xml → hadoop-common/dev-support/jdiff/hadoop_0.19.2.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.20.0.xml → hadoop-common/dev-support/jdiff/hadoop_0.20.0.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.20.1.xml → hadoop-common/dev-support/jdiff/hadoop_0.20.1.xml


+ 0 - 0
common/lib/jdiff/hadoop_0.20.2.xml → hadoop-common/dev-support/jdiff/hadoop_0.20.2.xml


+ 9 - 3
common/src/saveVersion.sh → hadoop-common/dev-support/saveVersion.sh

@@ -43,14 +43,20 @@ else
   branch="Unknown"
   url="file://$cwd"
 fi
-srcChecksum=`find src -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
 
-mkdir -p $build_dir/src/org/apache/hadoop
+which md5sum > /dev/null
+if [ "$?" = "0" ] ; then
+  srcChecksum=`find src/main/java -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
+else
+  srcChecksum="Not Available"
+fi
+
+mkdir -p $build_dir/org/apache/hadoop
 cat << EOF | \
   sed -e "s/VERSION/$version/" -e "s/USER/$user/" -e "s/DATE/$date/" \
       -e "s|URL|$url|" -e "s/REV/$revision/" \
       -e "s|BRANCH|$branch|" -e "s/SRCCHECKSUM/$srcChecksum/" \
-      > $build_dir/src/org/apache/hadoop/package-info.java
+      > $build_dir/org/apache/hadoop/package-info.java
 /*
  * Generated by src/saveVersion.sh
  */

+ 1019 - 0
hadoop-common/pom.xml

@@ -0,0 +1,1019 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>0.23.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-common</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <description>Apache Hadoop Common</description>
+  <name>Apache Hadoop Common</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <test.build.data>${project.build.directory}/test/data</test.build.data>
+    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
+    <test.build.webapps>${project.build.directory}/test-classes/webapps</test.build.webapps>
+    <test.cache.data>${project.build.directory}/test-classes</test.cache.data>
+    <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
+
+    <build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
+    <snappy.prefix>/usr/local</snappy.prefix>
+    <snappy.lib>${snappy.prefix}/lib</snappy.lib>
+    <bundle.snappy>false</bundle.snappy>
+    
+    <hadoop.component>common</hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-math</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>xmlenc</groupId>
+      <artifactId>xmlenc</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-net</groupId>
+      <artifactId>commons-net</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>servlet-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-compiler</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-runtime</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet.jsp</groupId>
+      <artifactId>jsp-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-el</groupId>
+      <artifactId>commons-el</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.java.dev.jets3t</groupId>
+      <artifactId>jets3t</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.mina</groupId>
+      <artifactId>mina-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ftpserver</groupId>
+      <artifactId>ftplet-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ftpserver</groupId>
+      <artifactId>ftpserver-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ftpserver</groupId>
+      <artifactId>ftpserver-deprecated</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-configuration</groupId>
+      <artifactId>commons-configuration</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jdt</groupId>
+      <artifactId>core</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>oro</groupId>
+      <artifactId>oro</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.aspectj</groupId>
+      <artifactId>aspectjrt</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>avro</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.sf.kosmosfs</groupId>
+      <artifactId>kfs</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ant</groupId>
+      <artifactId>ant</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <forkMode>always</forkMode>
+          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+          <argLine>-Xmx1024m</argLine>
+          <environmentVariables>
+            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/lib</LD_LIBRARY_PATH>
+          </environmentVariables>
+          <systemPropertyVariables>
+
+            <!-- TODO: all references in testcases should be updated to this default -->
+            <test.build.data>${test.build.data}</test.build.data>
+            <test.build.webapps>${test.build.webapps}</test.build.webapps>
+            <test.cache.data>${test.cache.data}</test.cache.data>
+            <hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
+            <test.build.classes>${test.build.classes}</test.build.classes>
+
+            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
+            <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
+          </systemPropertyVariables>
+          <includes>
+            <include>**/Test*.java</include>
+          </includes>
+          <excludes>
+            <exclude>**/${test.exclude}.java</exclude>
+            <exclude>${test.exclude.pattern}</exclude>
+            <exclude>**/Test*$*.java</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>jar</goal>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>jar</goal>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <attach>true</attach>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>save-version</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <mkdir dir="${project.build.directory}/generated-src/main/java"/>
+                <exec executable="sh">
+                  <arg
+                      line="${basedir}/dev-support/saveVersion.sh ${project.version} ${project.build.directory}/generated-src/main/java"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>generate-test-sources</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+
+                <mkdir dir="${project.build.directory}/generated-src/test/java"/>
+
+                <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
+                  <classpath refid="maven.compile.classpath"/>
+                </taskdef>
+                <recordcc destdir="${project.build.directory}/generated-src/test/java">
+                  <fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
+                </recordcc>
+
+                <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
+                  <classpath refid="maven.test.classpath"/>
+                </taskdef>
+                <schema destdir="${project.build.directory}/generated-src/test/java">
+                  <fileset dir="${basedir}/src/test">
+                    <include name="**/*.avsc"/>
+                  </fileset>
+                </schema>
+
+                <taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
+                  <classpath refid="maven.test.classpath"/>
+                </taskdef>
+                <schema destdir="${project.build.directory}/generated-src/test/java">
+                  <fileset dir="${basedir}/src/test">
+                    <include name="**/*.avpr"/>
+                  </fileset>
+                </schema>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>create-log-dir</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <!--
+                TODO: there are tests (TestLocalFileSystem#testCopy) that fail if data
+                TODO: from a previous run is present
+                -->
+                <delete dir="${test.build.data}"/>
+                <mkdir dir="${hadoop.log.dir}"/>
+
+                <copy toDir="${project.build.directory}/test-classes">
+                  <fileset dir="${basedir}/src/main/conf"/>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-src/main/java</source>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>add-test-source</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>add-test-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-src/test/java</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <configLocation>file://${basedir}/dev-support/checkstyle.xml</configLocation>
+          <failOnViolation>false</failOnViolation>
+          <format>xml</format>
+          <format>html</format>
+          <outputFile>${project.build.directory}/test/checkstyle-errors.xml</outputFile>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>CHANGES.txt</exclude>
+            <exclude>.idea/**</exclude>
+            <exclude>src/main/conf/*</exclude>
+            <exclude>src/main/docs/**</exclude>
+            <exclude>dev-support/jdiff/**</exclude>
+            <exclude>src/main/native/*</exclude>
+            <exclude>src/main/native/config/*</exclude>
+            <exclude>src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo</exclude>
+            <exclude>src/main/native/m4/*</exclude>
+            <exclude>src/test/empty-file</exclude>
+            <exclude>src/test/all-tests</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <linksource>true</linksource>
+          <quiet>true</quiet>
+          <verbose>false</verbose>
+          <source>${maven.compile.source}</source>
+          <charset>${maven.compile.encoding}</charset>
+          <reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
+          <destDir>api</destDir>
+          <groups>
+            <group>
+              <title>${project.name} API</title>
+              <packages>org.apache.hadoop*</packages>
+            </group>
+          </groups>
+          <doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
+          <docletArtifacts>
+            <docletArtifact>
+              <groupId>org.apache.hadoop</groupId>
+              <artifactId>hadoop-annotations</artifactId>
+              <version>${project.version}</version>
+            </docletArtifact>
+          </docletArtifacts>
+          <useStandardDocletOptions>true</useStandardDocletOptions>
+
+          <!-- switch on dependency-driven aggregation -->
+          <includeDependencySources>true</includeDependencySources>
+
+          <dependencySourceIncludes>
+            <!-- include ONLY dependencies I control -->
+            <dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
+          </dependencySourceIncludes>
+
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>native</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <snappy.prefix>/usr/local</snappy.prefix>
+        <snappy.lib>${snappy.prefix}/lib</snappy.lib>
+        <snappy.include>${snappy.prefix}/include</snappy.include>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <mkdir dir="${project.build.directory}/native/javah"/>
+                    <copy toDir="${project.build.directory}/native">
+                      <fileset dir="${basedir}/src/main/native"/>
+                    </copy>
+                    <mkdir dir="${project.build.directory}/native/m4"/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>native-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>compile</phase>
+                <goals>
+                  <goal>javah</goal>
+                </goals>
+                <configuration>
+                  <javahPath>${env.JAVA_HOME}/bin/javah</javahPath>
+                  <javahClassNames>
+                    <javahClassName>org.apache.hadoop.io.compress.zlib.ZlibCompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.zlib.ZlibDecompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsMapping</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.nativeio.NativeIO</javahClassName>
+                    <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyCompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
+                  </javahClassNames>
+                  <javahOutputDirectory>${project.build.directory}/native/javah</javahOutputDirectory>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>make-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>autoreconf</goal>
+                  <goal>configure</goal>
+                  <goal>make-install</goal>
+                </goals>
+              </execution>
+            </executions>
+            <configuration>
+              <!-- autoreconf settings -->
+              <workDir>${project.build.directory}/native</workDir>
+              <arguments>
+                <argument>-i</argument>
+                <argument>-f</argument>
+              </arguments>
+
+              <!-- configure settings -->
+              <configureEnvironment>
+                <property>
+                  <name>OS_NAME</name>
+                  <value>${os.name}</value>
+                </property>
+                <property>
+                  <name>OS_ARCH</name>
+                  <value>${os.arch}</value>
+                </property>
+                <property>
+                  <name>JVM_DATA_MODEL</name>
+                  <value>${sun.arch.data.model}</value>
+                </property>
+              </configureEnvironment>
+              <configureOptions>
+                <configureOption>CPPFLAGS=-I${snappy.include}</configureOption>
+                <configureOption>LDFLAGS=-L${snappy.lib}</configureOption>
+              </configureOptions>
+              <configureWorkDir>${project.build.directory}/native</configureWorkDir>
+              <prefix>/usr/local</prefix>
+
+              <!-- make settings -->
+              <installEnvironment>
+                <property>
+                  <name>OS_NAME</name>
+                  <value>${os.name}</value>
+                </property>
+                <property>
+                  <name>OS_ARCH</name>
+                  <value>${os.arch}</value>
+                </property>
+                <property>
+                  <name>JVM_DATA_MODEL</name>
+                  <value>${sun.arch.data.model}</value>
+                </property>
+                <property>
+                  <name>HADOOP_NATIVE_SRCDIR</name>
+                  <value>${project.build.directory}/native</value>
+                </property>
+              </installEnvironment>
+
+              <!-- configure & make settings -->
+              <destDir>${project.build.directory}/native/target</destDir>
+
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>docs</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <jdiff.stable.api>0.20.2</jdiff.stable.api>
+        <jdiff.stability>-unstable</jdiff.stability>
+        <jdiff.compatibility></jdiff.compatibility>
+        <jdiff.javadoc.maxmemory>512m</jdiff.javadoc.maxmemory>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-javadoc-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>javadoc</goal>
+                </goals>
+                <phase>prepare-package</phase>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>findbugs-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>findbugs</goal>
+                </goals>
+                <phase>prepare-package</phase>
+              </execution>
+            </executions>
+            <configuration>
+              <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-dependency-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>site</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>copy</goal>
+                </goals>
+                <configuration>
+                  <artifactItems>
+                    <artifactItem>
+                      <groupId>jdiff</groupId>
+                      <artifactId>jdiff</artifactId>
+                      <version>${jdiff.version}</version>
+                      <overWrite>false</overWrite>
+                      <outputDirectory>${project.build.directory}</outputDirectory>
+                      <destFileName>jdiff.jar</destFileName>
+                    </artifactItem>
+                    <artifactItem>
+                      <groupId>org.apache.hadoop</groupId>
+                      <artifactId>hadoop-annotations</artifactId>
+                      <version>${hadoop.annotations.version}</version>
+                      <overWrite>false</overWrite>
+                      <outputDirectory>${project.build.directory}</outputDirectory>
+                      <destFileName>hadoop-annotations.jar</destFileName>
+                    </artifactItem>
+                  </artifactItems>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>site</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+
+                    <mkdir dir="${project.build.directory}/docs-src"/>
+
+                    <copy todir="${project.build.directory}/docs-src">
+                      <fileset dir="${basedir}/src/main/docs"/>
+                    </copy>
+
+                    <!-- Docs -->
+                    <exec dir="${project.build.directory}/docs-src"
+                          executable="${env.FORREST_HOME}/bin/forrest"
+                          failonerror="true">
+                    </exec>
+                    <copy todir="${project.build.directory}/site">
+                      <fileset dir="${project.build.directory}/docs-src/build/site"/>
+                    </copy>
+                    <copy file="${project.build.directory}/docs-src/releasenotes.html"
+                          todir="${project.build.directory}/site"/>
+                    <style basedir="${basedir}/src/main/resources"
+                           destdir="${project.build.directory}/site"
+                           includes="core-default.xml"
+                           style="${basedir}/src/main/xsl/configuration.xsl"/>
+
+                    <!-- Convert 'CHANGES.txt' to 'changes.html' -->
+                    <exec executable="perl" input="${basedir}/../CHANGES.txt"
+                          output="${project.build.directory}/site/changes.html"
+                          failonerror="true">
+                      <arg value="${project.build.directory}/docs-src/changes/changes2html.pl"/>
+                    </exec>
+                    <copy todir="${project.build.directory}/site">
+                      <fileset dir="${project.build.directory}/docs-src/changes" includes="*.css"/>
+                    </copy>
+
+                    <!-- Jdiff -->
+                    <mkdir dir="${project.build.directory}/site/jdiff/xml"/>
+
+                    <javadoc maxmemory="${jdiff.javadoc.maxmemory}" verbose="yes">
+                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
+                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
+                        <param name="-apidir" value="${project.build.directory}/site/jdiff/xml"/>
+                        <param name="-apiname" value="hadoop-core ${project.version}"/>
+                        <param name="${jdiff.stability}"/>
+                      </doclet>
+                      <packageset dir="${basedir}/src/main/java"/>
+                      <classpath>
+                        <path refid="maven.compile.classpath"/>
+                      </classpath>
+                    </javadoc>
+                    <javadoc sourcepath="${basedir}/src/main/java"
+                             destdir="${project.build.directory}/site/jdiff/xml"
+                             sourceFiles="${basedir}/dev-support/jdiff/Null.java"
+                             maxmemory="${jdiff.javadoc.maxmemory}">
+                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
+                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
+                        <param name="-oldapi" value="hadoop-core ${jdiff.stable.api}"/>
+                        <param name="-newapi" value="hadoop-core ${project.version}"/>
+                        <param name="-oldapidir" value="${basedir}/dev-support/jdiff"/>
+                        <param name="-newapidir" value="${project.build.directory}/site/jdiff/xml"/>
+                        <param name="-javadocold"
+                               value="http://hadoop.apache.org/docs/${jdiff.stable.api}/api/"/>
+                        <param name="-javadocnew" value="${project.build.directory}/site/api"/>
+                        <param name="-stats"/>
+                        <param name="${jdiff.stability}"/>
+                        <param name="${jdiff.compatibility}"/>
+                      </doclet>
+                      <classpath>
+                        <path refid="maven.compile.classpath"/>
+                      </classpath>
+                    </javadoc>
+
+                    <xslt style="${env.FINDBUGS_HOME}/src/xsl/default.xsl"
+                          in="${project.build.directory}/findbugsXml.xml"
+                          out="${project.build.directory}/site/findbugs.html"/>
+
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>src</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${hadoop.assemblies.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>pre-tar-src</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-src</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>tar</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>pre-tar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/tar-copynativelibs.sh">
+
+                      TAR='tar cf -'
+                      UNTAR='tar xfBp -'
+                      LIB_DIR="${project.build.directory}/native/target/usr/local/lib"
+                      if [ -d $${LIB_DIR} ] ; then
+                      TARGET_DIR="${project.build.directory}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
+                      mkdir -p $${TARGET_DIR}
+                      cd $${LIB_DIR}
+                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      if [ "${bundle.snappy}" = "true" ] ; then
+                      cd ${snappy.lib}
+                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      fi
+                      fi
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./tar-copynativelibs.sh"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+              <execution>
+                <id>tar</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix tar to preserve symlinks -->
+                    <exec executable="tar" dir="${project.build.directory}" failonerror="yes">
+                      <arg value="czf"/>
+                      <arg value="${project.build.directory}/${project.artifactId}-${project.version}.tar.gz"/>
+                      <arg value="${project.artifactId}-${project.version}"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${hadoop.assemblies.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>pre-tar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-tar</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>bintar</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>pre-bintar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/bintar-copynativelibs.sh">
+
+                      TAR='tar cf -'
+                      UNTAR='tar xfBp -'
+                      LIB_DIR="${project.build.directory}/native/target/usr/local/lib"
+                      if [ -d $${LIB_DIR} ] ; then
+                      TARGET_DIR="${project.build.directory}/${project.artifactId}-${project.version}-bin/lib"
+                      mkdir -p $${TARGET_DIR}
+                      cd $${LIB_DIR}
+                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      if [ "${bundle.snappy}" = "true" ] ; then
+                      cd ${snappy.lib}
+                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      fi
+                      fi
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./bintar-copynativelibs.sh"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+              <execution>
+                <id>bintar</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix tar to preserve symlinks -->
+                    <exec executable="tar" dir="${project.build.directory}" failonerror="yes">
+                      <arg value="czf"/>
+                      <arg value="${project.build.directory}/${project.artifactId}-${project.version}-bin.tar.gz"/>
+                      <arg value="${project.artifactId}-${project.version}-bin"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${hadoop.assemblies.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>pre-bintar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>${project.artifactId}-${project.version}-bin</finalName>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-bintar</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
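
The new POM keeps all packaging work behind opt-in profiles (native, docs, src, tar, bintar) rather than running it on every build. The following invocations are a hedged sketch of how those profiles would be activated, assuming a standard Maven 3 install; the property names come from the POM above, and the docs profile additionally expects FORREST_HOME and FINDBUGS_HOME in the environment, since the antrun targets reference them:

    # Plain build: compile, run the unit tests, install the jar locally.
    mvn clean install

    # Also compile the JNI/native code, pointing at a non-default snappy
    # location and bundling libsnappy into the binary tarball.
    mvn clean package -Pnative,bintar -DskipTests \
        -Dsnappy.prefix=/opt/snappy -Dbundle.snappy=true

    # Source tarball plus full documentation (javadoc, jdiff, findbugs,
    # Forrest site); needs FORREST_HOME and FINDBUGS_HOME exported.
    mvn package -Psrc,docs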

+ 0 - 0
common/src/contrib/bash-tab-completion/README → hadoop-common/src/contrib/bash-tab-completion/README


+ 0 - 0
common/src/contrib/bash-tab-completion/hadoop.sh → hadoop-common/src/contrib/bash-tab-completion/hadoop.sh


+ 0 - 0
common/bin/hadoop → hadoop-common/src/main/bin/hadoop


+ 0 - 0
common/bin/hadoop-config.sh → hadoop-common/src/main/bin/hadoop-config.sh


+ 0 - 0
common/bin/hadoop-daemon.sh → hadoop-common/src/main/bin/hadoop-daemon.sh


+ 0 - 0
common/bin/hadoop-daemons.sh → hadoop-common/src/main/bin/hadoop-daemons.sh


+ 0 - 0
common/bin/rcc → hadoop-common/src/main/bin/rcc


+ 0 - 0
common/bin/slaves.sh → hadoop-common/src/main/bin/slaves.sh


+ 0 - 0
common/bin/start-all.sh → hadoop-common/src/main/bin/start-all.sh


+ 0 - 0
common/bin/stop-all.sh → hadoop-common/src/main/bin/stop-all.sh


+ 0 - 0
common/conf/hadoop-metrics.properties → hadoop-common/src/main/conf/hadoop-metrics.properties


+ 0 - 0
common/conf/hadoop-metrics2.properties → hadoop-common/src/main/conf/hadoop-metrics2.properties


+ 0 - 0
common/conf/log4j.properties → hadoop-common/src/main/conf/log4j.properties


+ 0 - 0
common/conf/ssl-client.xml.example → hadoop-common/src/main/conf/ssl-client.xml.example


+ 0 - 0
common/conf/ssl-server.xml.example → hadoop-common/src/main/conf/ssl-server.xml.example


+ 0 - 0
common/src/docs/changes/ChangesFancyStyle.css → hadoop-common/src/main/docs/changes/ChangesFancyStyle.css


+ 0 - 0
common/src/docs/changes/ChangesSimpleStyle.css → hadoop-common/src/main/docs/changes/ChangesSimpleStyle.css


+ 0 - 0
common/src/docs/changes/changes2html.pl → hadoop-common/src/main/docs/changes/changes2html.pl


+ 0 - 0
common/src/docs/forrest.properties → hadoop-common/src/main/docs/forrest.properties


+ 0 - 0
common/src/docs/releasenotes.html → hadoop-common/src/main/docs/releasenotes.html


+ 0 - 0
common/src/docs/src/documentation/README.txt → hadoop-common/src/main/docs/src/documentation/README.txt


+ 0 - 0
common/src/docs/src/documentation/classes/CatalogManager.properties → hadoop-common/src/main/docs/src/documentation/classes/CatalogManager.properties


+ 0 - 0
common/src/docs/src/documentation/conf/cli.xconf → hadoop-common/src/main/docs/src/documentation/conf/cli.xconf


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/Superusers.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/Superusers.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/cluster_setup.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/commands_manual.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/deployment_layout.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/deployment_layout.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/file_system_shell.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/index.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/index.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/native_libraries.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/native_libraries.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/service_level_auth.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/single_node_setup.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/single_node_setup.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/site.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/site.xml


+ 0 - 0
common/src/docs/src/documentation/content/xdocs/tabs.xml → hadoop-common/src/main/docs/src/documentation/content/xdocs/tabs.xml


+ 0 - 0
common/src/docs/src/documentation/resources/images/architecture.gif → hadoop-common/src/main/docs/src/documentation/resources/images/architecture.gif


+ 0 - 0
common/src/docs/src/documentation/resources/images/common-logo.jpg → hadoop-common/src/main/docs/src/documentation/resources/images/common-logo.jpg


+ 0 - 0
common/src/docs/src/documentation/resources/images/core-logo.gif → hadoop-common/src/main/docs/src/documentation/resources/images/core-logo.gif


+ 0 - 0
common/src/docs/src/documentation/resources/images/favicon.ico → hadoop-common/src/main/docs/src/documentation/resources/images/favicon.ico


+ 0 - 0
common/src/docs/src/documentation/resources/images/hadoop-logo-big.jpg → hadoop-common/src/main/docs/src/documentation/resources/images/hadoop-logo-big.jpg


+ 0 - 0
common/src/docs/src/documentation/resources/images/hadoop-logo.jpg → hadoop-common/src/main/docs/src/documentation/resources/images/hadoop-logo.jpg


+ 0 - 0
common/src/docs/src/documentation/resources/images/hdfsarchitecture.gif → hadoop-common/src/main/docs/src/documentation/resources/images/hdfsarchitecture.gif


+ 0 - 0
common/src/docs/src/documentation/resources/images/hdfsarchitecture.odg → hadoop-common/src/main/docs/src/documentation/resources/images/hdfsarchitecture.odg


+ 0 - 0
common/src/docs/src/documentation/resources/images/hdfsarchitecture.png → hadoop-common/src/main/docs/src/documentation/resources/images/hdfsarchitecture.png


+ 0 - 0
common/src/docs/src/documentation/resources/images/hdfsdatanodes.gif → hadoop-common/src/main/docs/src/documentation/resources/images/hdfsdatanodes.gif


+ 0 - 0
common/src/docs/src/documentation/resources/images/hdfsdatanodes.odg → hadoop-common/src/main/docs/src/documentation/resources/images/hdfsdatanodes.odg


+ 0 - 0
common/src/docs/src/documentation/resources/images/hdfsdatanodes.png → hadoop-common/src/main/docs/src/documentation/resources/images/hdfsdatanodes.png


+ 0 - 0
common/src/docs/src/documentation/skinconf.xml → hadoop-common/src/main/docs/src/documentation/skinconf.xml


Some files were not shown because too many files changed in this diff