
Fix for HADOOP-70. Unit tests should have their own hadoop-site.xml and mapred-default.xml, so that local modifications to these files in conf/ don't alter unit testing. Also rename TestDFS so that it is not normally run, and add a new test target which runs tests using the config files in conf/.
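
Hadoop's Configuration loads hadoop-site.xml (and JobConf loads mapred-default.xml) as classpath resources, so whichever copy appears first on the classpath wins; that is why the test classpath below swaps conf/ for src/test. A minimal sketch of that lookup, assuming only standard ClassLoader.getResource behavior (the ConfigLookupSketch class is illustrative, not part of this patch):

    // Sketch: which hadoop-site.xml does a test see?  ClassLoader.getResource
    // returns the first match in classpath order, so with src/test ahead of
    // (or instead of) conf/, the empty test config is the one that is found.
    public class ConfigLookupSketch {              // illustrative helper, not in the patch
      public static void main(String[] args) {
        ClassLoader cl = ConfigLookupSketch.class.getClassLoader();
        java.net.URL site = cl.getResource("hadoop-site.xml");
        java.net.URL mapred = cl.getResource("mapred-default.xml");
        System.out.println("hadoop-site.xml resolved from:    " + site);
        System.out.println("mapred-default.xml resolved from: " + mapred);
      }
    }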

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@385652 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting, 19 years ago
commit dc207a0a5a
4 changed files with 47 additions and 9 deletions
  1. build.xml (+22, -4)
  2. src/test/hadoop-site.xml (+10, -0)
  3. src/test/mapred-default.xml (+10, -0)
  4. src/test/org/apache/hadoop/dfs/ClusterTestDFS.java (+5, -5)

build.xml (+22, -4)

@@ -34,6 +34,8 @@
   <property name="test.build.data" value="${test.build.dir}/data"/>
   <property name="test.build.classes" value="${test.build.dir}/classes"/>
   <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
+  <property name="test.include" value="Test*"/>
+  <property name="test.classpath.id" value="test.classpath"/>
 
   <property name="web.src.dir" value="${basedir}/src/web"/>
   <property name="src.webapps" value="${basedir}/src/webapps"/>
@@ -55,16 +57,23 @@
     <fileset dir="${lib.dir}">
       <include name="**/*.jar" />
     </fileset>
+    <pathelement location="${conf.dir}"/>
   </path>
 
-  <!-- the unit test classpath -->
+  <!-- the unit test classpath: uses test.src.dir for configuration -->
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
-    <pathelement location="${conf.dir}"/>
     <pathelement location="${test.src.dir}"/>
     <path refid="classpath"/>
   </path>
 
+  <!-- the cluster test classpath: uses conf.dir for configuration -->
+  <path id="test.cluster.classpath">
+    <path refid="classpath"/>
+    <pathelement location="${test.build.classes}" />
+    <pathelement location="${test.src.dir}"/>
+  </path>
+
   <!-- ====================================================== -->
   <!-- Stuff needed by all targets                            -->
   <!-- ====================================================== -->
@@ -200,11 +209,12 @@
       errorProperty="tests.failed" failureProperty="tests.failed">
       <sysproperty key="test.build.data" value="${test.build.data}"/>
       <sysproperty key="test.src.dir" value="${test.src.dir}"/>
-      <classpath refid="test.classpath"/>
+      <classpath refid="${test.classpath.id}"/>
       <formatter type="plain" />
       <batchtest todir="${test.build.dir}" unless="testcase">
         <fileset dir="${test.src.dir}"
-                 includes="**/Test*.java" excludes="**/${test.exclude}.java" />
+	         includes="**/${test.include}.java"
+		 excludes="**/${test.exclude}.java" />
       </batchtest>
       <batchtest todir="${test.build.dir}" if="testcase">
         <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
@@ -215,6 +225,14 @@
 
   </target>   
 
+  <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
+  <target name="test-cluster">
+    <antcall target="test">
+      <param name="test.include" value="*"/>
+      <param name="test.classpath.id" value="test.cluster.classpath"/>
+    </antcall>
+  </target>
+
   <target name="nightly" depends="test, tar">
   </target>
 

src/test/hadoop-site.xml (+10, -0)

@@ -0,0 +1,10 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="nutch-conf.xsl"?>
+
+<!-- Values used when running unit tests.  This is mostly empty, to -->
+<!-- use the default values, overriding the potentially -->
+<!-- user-edited hadoop-site.xml in the conf/ directory.  -->
+
+<configuration>
+
+</configuration>

src/test/mapred-default.xml (+10, -0)

@@ -0,0 +1,10 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="nutch-conf.xsl"?>
+
+<!-- Values used when running unit tests.  This is mostly empty, to -->
+<!-- use the default values, overriding the potentially -->
+<!-- user-edited mapred-default.xml in the conf/ directory.  -->
+
+<configuration>
+
+</configuration>
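
With these empty override files first on the test classpath, unit tests fall back to the stock defaults shipped in hadoop-default.xml and mapred-default.xml instead of whatever a developer has locally in conf/. A rough sketch of what a test would observe, assuming the Configuration API of this era; the property shown and its printed default are examples, not guarantees:

    import org.apache.hadoop.conf.Configuration;

    public class DefaultsSketch {                  // illustrative only, not part of the patch
      public static void main(String[] args) {
        // Loads hadoop-default.xml and hadoop-site.xml from the classpath;
        // with src/test first, the empty test hadoop-site.xml shadows conf/.
        Configuration conf = new Configuration();
        // Expect the stock default (e.g. "local" for fs.default.name) rather than
        // a locally customized value; exact defaults come from hadoop-default.xml.
        System.out.println("fs.default.name = " + conf.get("fs.default.name"));
      }
    }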

src/test/org/apache/hadoop/dfs/TestDFS.java → src/test/org/apache/hadoop/dfs/ClusterTestDFS.java (+5, -5)

@@ -37,7 +37,7 @@ import java.lang.reflect.InvocationTargetException;
 
 /**
  * Test DFS.
- * TestDFS is a JUnit test for DFS using "pseudo multiprocessing" (or 
+ * ClusterTestDFS is a JUnit test for DFS using "pseudo multiprocessing" (or 
  more strictly, pseudo distributed) meaning all daemons run in one process 
  and sockets are used to communicate between daemons.  The test permutes
  * various block sizes, number of files, file sizes, and number of
@@ -68,9 +68,9 @@ import java.lang.reflect.InvocationTargetException;
  * (HadoopFS level) test {@link org.apache.hadoop.fs.TestFileSystem}.
  * @author Paul Baclace
  */
-public class TestDFS extends TestCase implements FSConstants {
+public class ClusterTestDFS extends TestCase implements FSConstants {
   private static final Logger LOG =
-      LogFormatter.getLogger("org.apache.hadoop.dfs.TestDFS");
+      LogFormatter.getLogger("org.apache.hadoop.dfs.ClusterTestDFS");
 
   private static Configuration conf = new Configuration();
   private static int BUFFER_SIZE =
@@ -518,12 +518,12 @@ public class TestDFS extends TestCase implements FSConstants {
   }
 
   public static void main(String[] args) throws Exception {
-    String usage = "Usage: TestDFS (no args)";
+    String usage = "Usage: ClusterTestDFS (no args)";
     if (args.length != 0) {
       System.err.println(usage);
       System.exit(-1);
     }
-    String[] testargs = {"org.apache.hadoop.dfs.TestDFS"};
+    String[] testargs = {"org.apache.hadoop.dfs.ClusterTestDFS"};
     junit.textui.TestRunner.main(testargs);
   }