
Fix hdfs ant targets

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/HADOOP-4687/hdfs@780253 13f79535-47bb-0310-9956-ffa450edef68
Giridharan Kesavan, 16 years ago
parent
commit
46cb751a5f
100 changed files with 374 additions and 3 deletions
  1. src/contrib/build-contrib.xml (+304, -0)
  2. src/contrib/build.xml (+67, -0)
  3. src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java (+0, -0)
  4. src/test/hdfs/org/apache/hadoop/cli/clitest_data/data120bytes (+0, -0)
  5. src/test/hdfs/org/apache/hadoop/cli/clitest_data/data15bytes (+0, -0)
  6. src/test/hdfs/org/apache/hadoop/cli/clitest_data/data30bytes (+0, -0)
  7. src/test/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes (+0, -0)
  8. src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml (+0, -0)
  9. src/test/hdfs/org/apache/hadoop/fs/TestGlobPaths.java (+0, -0)
  10. src/test/hdfs/org/apache/hadoop/fs/TestUrlStreamHandler.java (+0, -0)
  11. src/test/hdfs/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java (+3, -3)
  12. src/test/hdfs/org/apache/hadoop/fs/loadGenerator/TestLoadGenerator.java (+0, -0)
  13. src/test/hdfs/org/apache/hadoop/fs/permission/TestStickyBit.java (+0, -0)
  14. src/test/hdfs/org/apache/hadoop/hdfs/AppendTestUtil.java (+0, -0)
  15. src/test/hdfs/org/apache/hadoop/hdfs/BenchmarkThroughput.java (+0, -0)
  16. src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java (+0, -0)
  17. src/test/hdfs/org/apache/hadoop/hdfs/DataNodeCluster.java (+0, -0)
  18. src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java (+0, -0)
  19. src/test/hdfs/org/apache/hadoop/hdfs/TestAbandonBlock.java (+0, -0)
  20. src/test/hdfs/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java (+0, -0)
  21. src/test/hdfs/org/apache/hadoop/hdfs/TestCrcCorruption.java (+0, -0)
  22. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSClientRetries.java (+0, -0)
  23. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSFinalize.java (+0, -0)
  24. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSMkdirs.java (+0, -0)
  25. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java (+0, -0)
  26. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSRename.java (+0, -0)
  27. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSRollback.java (+0, -0)
  28. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java (+0, -0)
  29. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShellGenericOptions.java (+0, -0)
  30. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSStartupVersions.java (+0, -0)
  31. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSStorageStateRecovery.java (+0, -0)
  32. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgrade.java (+0, -0)
  33. src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java (+0, -0)
  34. src/test/hdfs/org/apache/hadoop/hdfs/TestDataTransferProtocol.java (+0, -0)
  35. src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java (+0, -0)
  36. src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeDeath.java (+0, -0)
  37. src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeReport.java (+0, -0)
  38. src/test/hdfs/org/apache/hadoop/hdfs/TestDecommission.java (+0, -0)
  39. src/test/hdfs/org/apache/hadoop/hdfs/TestDefaultNameNodePort.java (+0, -0)
  40. src/test/hdfs/org/apache/hadoop/hdfs/TestDistributedFileSystem.java (+0, -0)
  41. src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java (+0, -0)
  42. src/test/hdfs/org/apache/hadoop/hdfs/TestFSOutputSummer.java (+0, -0)
  43. src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend.java (+0, -0)
  44. src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend2.java (+0, -0)
  45. src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend3.java (+0, -0)
  46. src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java (+0, -0)
  47. src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java (+0, -0)
  48. src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationClient.java (+0, -0)
  49. src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationDelete.java (+0, -0)
  50. src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationEmpty.java (+0, -0)
  51. src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationNamenodeRestart.java (+0, -0)
  52. src/test/hdfs/org/apache/hadoop/hdfs/TestFileStatus.java (+0, -0)
  53. src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java (+0, -0)
  54. src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java (+0, -0)
  55. src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSServerPorts.java (+0, -0)
  56. src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSTrash.java (+0, -0)
  57. src/test/hdfs/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java (+0, -0)
  58. src/test/hdfs/org/apache/hadoop/hdfs/TestLease.java (+0, -0)
  59. src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery.java (+0, -0)
  60. src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery2.java (+0, -0)
  61. src/test/hdfs/org/apache/hadoop/hdfs/TestLocalDFS.java (+0, -0)
  62. src/test/hdfs/org/apache/hadoop/hdfs/TestMissingBlocksAlert.java (+0, -0)
  63. src/test/hdfs/org/apache/hadoop/hdfs/TestModTime.java (+0, -0)
  64. src/test/hdfs/org/apache/hadoop/hdfs/TestPread.java (+0, -0)
  65. src/test/hdfs/org/apache/hadoop/hdfs/TestQuota.java (+0, -0)
  66. src/test/hdfs/org/apache/hadoop/hdfs/TestRenameWhileOpen.java (+0, -0)
  67. src/test/hdfs/org/apache/hadoop/hdfs/TestReplication.java (+0, -0)
  68. src/test/hdfs/org/apache/hadoop/hdfs/TestRestartDFS.java (+0, -0)
  69. src/test/hdfs/org/apache/hadoop/hdfs/TestSafeMode.java (+0, -0)
  70. src/test/hdfs/org/apache/hadoop/hdfs/TestSeekBug.java (+0, -0)
  71. src/test/hdfs/org/apache/hadoop/hdfs/TestSetTimes.java (+0, -0)
  72. src/test/hdfs/org/apache/hadoop/hdfs/TestSetrepDecreasing.java (+0, -0)
  73. src/test/hdfs/org/apache/hadoop/hdfs/TestSetrepIncreasing.java (+0, -0)
  74. src/test/hdfs/org/apache/hadoop/hdfs/TestSmallBlock.java (+0, -0)
  75. src/test/hdfs/org/apache/hadoop/hdfs/UpgradeUtilities.java (+0, -0)
  76. src/test/hdfs/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz (+0, -0)
  77. src/test/hdfs/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt (+0, -0)
  78. src/test/hdfs/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java (+0, -0)
  79. src/test/hdfs/org/apache/hadoop/hdfs/server/common/TestDistributedUpgrade.java (+0, -0)
  80. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java (+0, -0)
  81. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestBlockReplacement.java (+0, -0)
  82. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java (+0, -0)
  83. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java (+0, -0)
  84. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java (+0, -0)
  85. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestInterDatanodeProtocol.java (+0, -0)
  86. src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java (+0, -0)
  87. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java (+0, -0)
  88. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/FileNameGenerator.java (+0, -0)
  89. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java (+0, -0)
  90. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java (+0, -0)
  91. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java (+0, -0)
  92. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestComputeInvalidateWork.java (+0, -0)
  93. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestDatanodeDescriptor.java (+0, -0)
  94. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java (+0, -0)
  95. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFileLimit.java (+0, -0)
  96. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java (+0, -0)
  97. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestHeartbeatHandling.java (+0, -0)
  98. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestHost2NodesMap.java (+0, -0)
  99. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java (+0, -0)
  100. src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java (+0, -0)

+ 304 - 0
src/contrib/build-contrib.xml

@@ -0,0 +1,304 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Imported by contrib/*/build.xml files to share generic targets. -->
+
+<project name="hadoopbuildcontrib" xmlns:ivy="antlib:org.apache.ivy.ant">
+
+  <property name="name" value="${ant.project.name}"/>
+  <property name="root" value="${basedir}"/>
+
+  <!-- Load all the default properties, and any the user wants      -->
+  <!-- to contribute (without having to type -D or edit this file). -->
+  <property file="${user.home}/${name}.build.properties" />
+  <property file="${root}/build.properties" />
+
+  <property name="hadoop.root" location="${root}/../../../"/>
+  <property name="src.dir"  location="${root}/src/java"/>
+  <property name="src.test" location="${root}/src/test"/>
+  <property name="src.examples" location="${root}/src/examples"/>
+
+  <available file="${src.examples}" type="dir" property="examples.available"/>
+  <available file="${src.test}" type="dir" property="test.available"/>
+
+  <property name="conf.dir" location="${hadoop.root}/conf"/>
+  <property name="test.junit.output.format" value="plain"/>
+  <property name="test.output" value="no"/>
+  <property name="test.timeout" value="900000"/>
+  <property name="build.dir" location="${hadoop.root}/build/contrib/${name}"/>
+  <property name="build.classes" location="${build.dir}/classes"/>
+  <property name="build.test" location="${build.dir}/test"/>
+  <property name="build.examples" location="${build.dir}/examples"/>
+  <property name="hadoop.log.dir" location="${build.dir}/test/logs"/>
+  <!-- all jars together -->
+  <property name="javac.deprecation" value="off"/>
+  <property name="javac.debug" value="on"/>
+  <property name="build.ivy.lib.dir" value="${hadoop.root}/build/ivy/lib"/> 
+
+  <property name="javadoc.link"
+            value="http://java.sun.com/j2se/1.4/docs/api/"/>
+
+  <property name="build.encoding" value="ISO-8859-1"/>
+
+  <fileset id="lib.jars" dir="${root}" includes="lib/*.jar"/>
+
+
+   <!-- IVY properties set here -->
+  <property name="ivy.dir" location="ivy" />
+  <property name="ivysettings.xml" location="${hadoop.root}/ivy/ivysettings.xml"/>
+  <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
+  <loadproperties srcfile="${hadoop.root}/ivy/libraries.properties"/>
+  <property name="ivy.jar" location="${hadoop.root}/ivy/ivy-${ivy.version}.jar"/>
+  <property name="ivy_repo_url" 
+	value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" />
+  <property name="build.dir" location="build" />
+  <property name="build.ivy.dir" location="${build.dir}/ivy" />
+  <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
+  <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
+  <property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/> 
+
+  <!--this is the naming policy for artifacts we want pulled down-->
+  <property name="ivy.artifact.retrieve.pattern"
+    			value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
+
+  <!-- the normal classpath -->
+  <path id="contrib-classpath">
+    <pathelement location="${build.classes}"/>
+    <fileset refid="lib.jars"/>
+    <pathelement location="${hadoop.root}/build/classes"/>
+    <fileset dir="${hadoop.root}/lib">
+      <include name="**/*.jar" />
+    </fileset>
+    <path refid="${ant.project.name}.common-classpath"/>
+    <pathelement path="${clover.jar}"/>
+  </path>
+
+  <!-- the unit test classpath -->
+  <path id="test.classpath">
+    <pathelement location="${build.test}" />
+    <pathelement location="${hadoop.root}/build/test/classes"/>
+    <pathelement location="${hadoop.root}/build/test/core/classes"/>
+    <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
+    <pathelement location="${hadoop.root}/build/test/mapred/classes"/>
+    <pathelement location="${hadoop.root}/src/contrib/test"/>
+    <pathelement location="${conf.dir}"/>
+    <pathelement location="${hadoop.root}/build"/>
+    <pathelement location="${build.examples}"/>
+    <path refid="contrib-classpath"/>
+  </path>
+
+
+  <!-- to be overridden by sub-projects -->
+  <target name="check-contrib"/>
+  <target name="init-contrib"/>
+
+  <!-- ====================================================== -->
+  <!-- Stuff needed by all targets                            -->
+  <!-- ====================================================== -->
+  <target name="init" depends="check-contrib" unless="skip.contrib">
+    <echo message="contrib: ${name}"/>
+    <mkdir dir="${build.dir}"/>
+    <mkdir dir="${build.classes}"/>
+    <mkdir dir="${build.test}"/>
+    <mkdir dir="${build.examples}"/>
+    <mkdir dir="${hadoop.log.dir}"/>
+    <antcall target="init-contrib"/>
+  </target>
+
+
+  <!-- ====================================================== -->
+  <!-- Compile a Hadoop contrib's files                       -->
+  <!-- ====================================================== -->
+  <target name="compile" depends="init, ivy-retrieve-common" unless="skip.contrib">
+    <echo message="contrib: ${name}"/>
+    <javac
+     encoding="${build.encoding}"
+     srcdir="${src.dir}"
+     includes="**/*.java"
+     destdir="${build.classes}"
+     debug="${javac.debug}"
+     deprecation="${javac.deprecation}">
+     <classpath refid="contrib-classpath"/>
+    </javac>
+  </target>
+
+
+  <!-- ======================================================= -->
+  <!-- Compile a Hadoop contrib's example files (if available) -->
+  <!-- ======================================================= -->
+  <target name="compile-examples" depends="compile" if="examples.available">
+    <echo message="contrib: ${name}"/>
+    <javac
+     encoding="${build.encoding}"
+     srcdir="${src.examples}"
+     includes="**/*.java"
+     destdir="${build.examples}"
+     debug="${javac.debug}">
+     <classpath refid="contrib-classpath"/>
+    </javac>
+  </target>
+
+
+  <!-- ================================================================== -->
+  <!-- Compile test code                                                  -->
+  <!-- ================================================================== -->
+  <target name="compile-test" depends="compile-examples" if="test.available">
+    <echo message="contrib: ${name}"/>
+    <javac
+     encoding="${build.encoding}"
+     srcdir="${src.test}"
+     includes="**/*.java"
+     destdir="${build.test}"
+     debug="${javac.debug}">
+    <classpath refid="test.classpath"/>
+    </javac>
+  </target>
+  
+
+  <!-- ====================================================== -->
+  <!-- Make a Hadoop contrib's jar                            -->
+  <!-- ====================================================== -->
+  <target name="jar" depends="compile" unless="skip.contrib">
+    <echo message="contrib: ${name}"/>
+    <jar
+      jarfile="${build.dir}/hadoop-${version}-${name}.jar"
+      basedir="${build.classes}"      
+    />
+  </target>
+
+  
+  <!-- ====================================================== -->
+  <!-- Make a Hadoop contrib's examples jar                   -->
+  <!-- ====================================================== -->
+  <target name="jar-examples" depends="compile-examples"
+          if="examples.available" unless="skip.contrib">
+    <echo message="contrib: ${name}"/>
+    <jar jarfile="${build.dir}/hadoop-${version}-${name}-examples.jar">
+      <fileset dir="${build.classes}">
+      </fileset>
+      <fileset dir="${build.examples}">
+      </fileset>
+    </jar>
+  </target>
+  
+  <!-- ====================================================== -->
+  <!-- Package a Hadoop contrib                               -->
+  <!-- ====================================================== -->
+  <target name="package" depends="jar, jar-examples" unless="skip.contrib"> 
+    <mkdir dir="${dist.dir}/contrib/${name}"/>
+    <copy todir="${dist.dir}/contrib/${name}" includeEmptyDirs="false" flatten="true">
+      <fileset dir="${build.dir}">
+        <include name="hadoop-${version}-${name}.jar" />
+      </fileset>
+    </copy>
+  </target>
+  
+  <!-- ================================================================== -->
+  <!-- Run unit tests                                                     -->
+  <!-- ================================================================== -->
+  <target name="test" depends="compile-test, compile" if="test.available">
+    <echo message="contrib: ${name}"/>
+    <delete dir="${hadoop.log.dir}"/>
+    <mkdir dir="${hadoop.log.dir}"/>
+    <junit
+      printsummary="yes" showoutput="${test.output}" 
+      haltonfailure="no" fork="yes" maxmemory="256m"
+      errorProperty="tests.failed" failureProperty="tests.failed"
+      timeout="${test.timeout}">
+      
+      <sysproperty key="test.build.data" value="${build.test}/data"/>
+      <sysproperty key="build.test" value="${build.test}"/>
+      <sysproperty key="contrib.name" value="${name}"/>
+      
+      <!-- requires fork=yes for: 
+        relative File paths to use the specified user.dir 
+        classpath to use build/contrib/*.jar
+      -->
+      <sysproperty key="user.dir" value="${build.test}/data"/>
+      
+      <sysproperty key="fs.default.name" value="${fs.default.name}"/>
+      <sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
+      <sysproperty key="hadoop.log.dir" value="${hadoop.log.dir}"/> 
+      <sysproperty key="taskcontroller-path" value="${taskcontroller-path}"/>
+      <sysproperty key="taskcontroller-user" value="${taskcontroller-user}"/>
+      <classpath refid="test.classpath"/>
+      <formatter type="${test.junit.output.format}" />
+      <batchtest todir="${build.test}" unless="testcase">
+        <fileset dir="${src.test}"
+                 includes="**/Test*.java" excludes="**/${test.exclude}.java" />
+      </batchtest>
+      <batchtest todir="${build.test}" if="testcase">
+        <fileset dir="${src.test}" includes="**/${testcase}.java"/>
+      </batchtest>
+    </junit>
+    <fail if="tests.failed">Tests failed!</fail>
+  </target>
+
+  <!-- ================================================================== -->
+  <!-- Clean.  Delete the build files, and their directories              -->
+  <!-- ================================================================== -->
+  <target name="clean">
+    <echo message="contrib: ${name}"/>
+    <delete dir="${build.dir}"/>
+  </target>
+
+  <target name="ivy-probe-antlib" >
+    <condition property="ivy.found">
+      <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
+    </condition>
+  </target>
+
+
+  <target name="ivy-download" description="To download ivy " unless="offline">
+    <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
+  </target>
+
+  <target name="ivy-init-antlib" depends="ivy-download,ivy-probe-antlib" unless="ivy.found">
+    <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
+      loaderRef="ivyLoader">
+      <classpath>
+        <pathelement location="${ivy.jar}"/>
+      </classpath>
+    </typedef>
+    <fail >
+      <condition >
+        <not>
+          <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
+        </not>
+      </condition>
+      You need Apache Ivy 2.0 or later from http://ant.apache.org/
+      It could not be loaded from ${ivy_repo_url}
+    </fail>
+  </target>
+
+  <target name="ivy-init" depends="ivy-init-antlib">
+    <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}"/>
+  </target>
+
+  <target name="ivy-resolve-common" depends="ivy-init">
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
+  </target>
+
+  <target name="ivy-retrieve-common" depends="ivy-resolve-common"
+    description="Retrieve Ivy-managed artifacts for the compile/test configurations">
+    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" 
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" sync="true" />
+    <ivy:cachepath pathid="${ant.project.name}.common-classpath" conf="common" />
+  </target>
+</project>

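As the header comment above says, build-contrib.xml is imported by contrib/*/build.xml files to share generic targets. A minimal hedged sketch of such a consumer follows; the contrib name "mycontrib" and its directory are hypothetical and not part of this commit:

  <?xml version="1.0"?>
  <!-- src/contrib/mycontrib/build.xml (hypothetical): reuses the shared
       targets (init, compile, compile-test, jar, test, clean, ivy-*)
       defined in build-contrib.xml; ${name} defaults to the project name. -->
  <project name="mycontrib" default="jar">
    <import file="../build-contrib.xml"/>
  </project>

With a file like this in place, running the jar or test targets from the contrib directory resolves against the shared targets and the contrib-classpath/test.classpath paths declared above.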
+ 67 - 0
src/contrib/build.xml

@@ -0,0 +1,67 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<project name="hadoopcontrib" default="compile" basedir=".">
+  
+  <!-- In case one of the contrib subdirectories -->
+  <!-- fails the build or test targets and you cannot fix it: -->
+  <!-- Then add to fileset: excludes="badcontrib/build.xml" -->
+
+  <!-- ====================================================== -->
+  <!-- Compile contribs.                                      -->
+  <!-- ====================================================== -->
+  <target name="compile">
+    <subant target="compile">
+      <fileset dir="." includes="*/build.xml"/>
+    </subant>
+  </target>
+  
+  <!-- ====================================================== -->
+  <!-- Package contrib jars.                                  -->
+  <!-- ====================================================== -->
+  <target name="package">
+    <subant target="package">
+      <fileset dir="." includes="*/build.xml"/>
+    </subant>
+  </target>
+  
+  <!-- ====================================================== -->
+  <!-- Test all the contribs.                               -->
+  <!-- ====================================================== -->
+  <target name="test">
+    <subant target="test">
+      <fileset dir="." includes="hdfsproxy/build.xml"/>
+      <fileset dir="." includes="streaming/build.xml"/>
+      <fileset dir="." includes="fairscheduler/build.xml"/>
+      <fileset dir="." includes="capacity-scheduler/build.xml"/>
+      <fileset dir="." includes="mrunit/build.xml"/>
+    </subant>
+  </target>
+  
+  
+  <!-- ====================================================== -->
+  <!-- Clean all the contribs.                              -->
+  <!-- ====================================================== -->
+  <target name="clean">
+    <subant target="clean">
+      <fileset dir="." includes="*/build.xml"/>
+    </subant>
+  </target>
+
+</project>

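The comment at the top of this build file describes how to skip a contrib subdirectory whose build or test targets cannot currently be fixed: add an excludes attribute to the fileset. A minimal sketch, using the placeholder name "badcontrib" taken from that comment (not a real subdirectory in this commit):

  <target name="compile">
    <subant target="compile">
      <!-- Build every contrib except the one that is currently broken. -->
      <fileset dir="." includes="*/build.xml" excludes="badcontrib/build.xml"/>
    </subant>
  </target>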
+ 0 - 0
src/test/org/apache/hadoop/cli/TestHDFSCLI.java → src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java


+ 0 - 0
src/test/org/apache/hadoop/cli/clitest_data/data120bytes → src/test/hdfs/org/apache/hadoop/cli/clitest_data/data120bytes


+ 0 - 0
src/test/org/apache/hadoop/cli/clitest_data/data15bytes → src/test/hdfs/org/apache/hadoop/cli/clitest_data/data15bytes


+ 0 - 0
src/test/org/apache/hadoop/cli/clitest_data/data30bytes → src/test/hdfs/org/apache/hadoop/cli/clitest_data/data30bytes


+ 0 - 0
src/test/org/apache/hadoop/cli/clitest_data/data60bytes → src/test/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes


+ 0 - 0
src/test/org/apache/hadoop/cli/testHDFSConf.xml → src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml


+ 0 - 0
src/test/org/apache/hadoop/fs/TestGlobPaths.java → src/test/hdfs/org/apache/hadoop/fs/TestGlobPaths.java


+ 0 - 0
src/test/org/apache/hadoop/fs/TestUrlStreamHandler.java → src/test/hdfs/org/apache/hadoop/fs/TestUrlStreamHandler.java


+ 3 - 3
src/test/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java → src/test/hdfs/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java

@@ -24,7 +24,7 @@ import org.apache.ftpserver.DefaultFtpServerContext;
 import org.apache.ftpserver.FtpServer;
 import org.apache.ftpserver.ftplet.Authority;
 import org.apache.ftpserver.ftplet.UserManager;
-import org.apache.ftpserver.listener.mina.MinaListener;
+import org.apache.ftpserver.listener.nio.NioListener;
 import org.apache.ftpserver.usermanager.BaseUser;
 import org.apache.ftpserver.usermanager.WritePermission;
 import org.apache.hadoop.conf.Configuration;
@@ -54,7 +54,7 @@ public class TestFTPFileSystem extends TestCase {
   private void startServer() {
     try {
       DefaultFtpServerContext context = new DefaultFtpServerContext(false);
-      MinaListener listener = new MinaListener();
+      NioListener listener = new NioListener();
       // Set port to 0 for OS to give a free port
       listener.setPort(0);
       context.setListener("default", listener);
@@ -93,7 +93,7 @@ public class TestFTPFileSystem extends TestCase {
     defaultConf = new Configuration();
     localFs = FileSystem.getLocal(defaultConf);
     ftpServerConfig = new Path(localFs.getWorkingDirectory(), "res");
-    MinaListener listener = (MinaListener) server.getServerContext()
+    NioListener listener = (NioListener) server.getServerContext()
         .getListener("default");
     int serverPort = listener.getPort();
     ftpFs = FileSystem.get(URI.create("ftp://admin:admin@localhost:"

+ 0 - 0
src/test/org/apache/hadoop/fs/loadGenerator/TestLoadGenerator.java → src/test/hdfs/org/apache/hadoop/fs/loadGenerator/TestLoadGenerator.java


+ 0 - 0
src/test/org/apache/hadoop/fs/permission/TestStickyBit.java → src/test/hdfs/org/apache/hadoop/fs/permission/TestStickyBit.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/AppendTestUtil.java → src/test/hdfs/org/apache/hadoop/hdfs/AppendTestUtil.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/BenchmarkThroughput.java → src/test/hdfs/org/apache/hadoop/hdfs/BenchmarkThroughput.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/DFSTestUtil.java → src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/DataNodeCluster.java → src/test/hdfs/org/apache/hadoop/hdfs/DataNodeCluster.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java → src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestAbandonBlock.java → src/test/hdfs/org/apache/hadoop/hdfs/TestAbandonBlock.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java → src/test/hdfs/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestCrcCorruption.java → src/test/hdfs/org/apache/hadoop/hdfs/TestCrcCorruption.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSClientRetries.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSFinalize.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSFinalize.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSMkdirs.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSMkdirs.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSPermission.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSRename.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSRename.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSRollback.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSRollback.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSShell.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSShellGenericOptions.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShellGenericOptions.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSStartupVersions.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSStartupVersions.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSStorageStateRecovery.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSStorageStateRecovery.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSUpgrade.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgrade.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDataTransferProtocol.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDataTransferProtocol.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDatanodeDeath.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeDeath.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDatanodeReport.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeReport.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDecommission.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDecommission.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDefaultNameNodePort.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDefaultNameNodePort.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestDistributedFileSystem.java → src/test/hdfs/org/apache/hadoop/hdfs/TestDistributedFileSystem.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFSInputChecker.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFSOutputSummer.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFSOutputSummer.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileAppend.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileAppend2.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend2.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileAppend3.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend3.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileCorruption.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileCreation.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileCreationClient.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationClient.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileCreationDelete.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationDelete.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileCreationEmpty.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationEmpty.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileCreationNamenodeRestart.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationNamenodeRestart.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestFileStatus.java → src/test/hdfs/org/apache/hadoop/hdfs/TestFileStatus.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestGetBlocks.java → src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java → src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestHDFSServerPorts.java → src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSServerPorts.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestHDFSTrash.java → src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSTrash.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java → src/test/hdfs/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestLease.java → src/test/hdfs/org/apache/hadoop/hdfs/TestLease.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestLeaseRecovery.java → src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestLeaseRecovery2.java → src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery2.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestLocalDFS.java → src/test/hdfs/org/apache/hadoop/hdfs/TestLocalDFS.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestMissingBlocksAlert.java → src/test/hdfs/org/apache/hadoop/hdfs/TestMissingBlocksAlert.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestModTime.java → src/test/hdfs/org/apache/hadoop/hdfs/TestModTime.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestPread.java → src/test/hdfs/org/apache/hadoop/hdfs/TestPread.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestQuota.java → src/test/hdfs/org/apache/hadoop/hdfs/TestQuota.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestRenameWhileOpen.java → src/test/hdfs/org/apache/hadoop/hdfs/TestRenameWhileOpen.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestReplication.java → src/test/hdfs/org/apache/hadoop/hdfs/TestReplication.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestRestartDFS.java → src/test/hdfs/org/apache/hadoop/hdfs/TestRestartDFS.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestSafeMode.java → src/test/hdfs/org/apache/hadoop/hdfs/TestSafeMode.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestSeekBug.java → src/test/hdfs/org/apache/hadoop/hdfs/TestSeekBug.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestSetTimes.java → src/test/hdfs/org/apache/hadoop/hdfs/TestSetTimes.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestSetrepDecreasing.java → src/test/hdfs/org/apache/hadoop/hdfs/TestSetrepDecreasing.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestSetrepIncreasing.java → src/test/hdfs/org/apache/hadoop/hdfs/TestSetrepIncreasing.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/TestSmallBlock.java → src/test/hdfs/org/apache/hadoop/hdfs/TestSmallBlock.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/UpgradeUtilities.java → src/test/hdfs/org/apache/hadoop/hdfs/UpgradeUtilities.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz → src/test/hdfs/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz


+ 0 - 0
src/test/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt → src/test/hdfs/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java → src/test/hdfs/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/common/TestDistributedUpgrade.java → src/test/hdfs/org/apache/hadoop/hdfs/server/common/TestDistributedUpgrade.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestBlockReplacement.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestBlockReplacement.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestInterDatanodeProtocol.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestInterDatanodeProtocol.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java → src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/FileNameGenerator.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/FileNameGenerator.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestComputeInvalidateWork.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestComputeInvalidateWork.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestDatanodeDescriptor.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestDatanodeDescriptor.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestFileLimit.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFileLimit.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestFsck.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestHeartbeatHandling.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestHeartbeatHandling.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestHost2NodesMap.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestHost2NodesMap.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java


+ 0 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java → src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java


Some files were not shown because too many files changed in this diff