Browse Source

Fix for HADOOP-70. Unit tests should have their own hadoop-site.xml and mapred-default.xml, so that local modifications to these files in conf/ don't alter unit testing. Also rename TestDFS so that it is not normally run, and add a new test target which runs tests using the config files in conf/.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@385652 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 19 years ago
parent
commit
dc207a0a5a

+ 22 - 4
build.xml

@@ -34,6 +34,8 @@
   <property name="test.build.data" value="${test.build.dir}/data"/>
   <property name="test.build.data" value="${test.build.dir}/data"/>
   <property name="test.build.classes" value="${test.build.dir}/classes"/>
   <property name="test.build.classes" value="${test.build.dir}/classes"/>
   <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
   <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
+  <property name="test.include" value="Test*"/>
+  <property name="test.classpath.id" value="test.classpath"/>
 
 
   <property name="web.src.dir" value="${basedir}/src/web"/>
   <property name="web.src.dir" value="${basedir}/src/web"/>
   <property name="src.webapps" value="${basedir}/src/webapps"/>
   <property name="src.webapps" value="${basedir}/src/webapps"/>
@@ -55,16 +57,23 @@
     <fileset dir="${lib.dir}">
     <fileset dir="${lib.dir}">
       <include name="**/*.jar" />
       <include name="**/*.jar" />
     </fileset>
     </fileset>
+    <pathelement location="${conf.dir}"/>
   </path>
   </path>
 
 
-  <!-- the unit test classpath -->
+  <!-- the unit test classpath: uses test.src.dir for configuration -->
   <path id="test.classpath">
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
     <pathelement location="${test.build.classes}" />
-    <pathelement location="${conf.dir}"/>
     <pathelement location="${test.src.dir}"/>
     <pathelement location="${test.src.dir}"/>
     <path refid="classpath"/>
     <path refid="classpath"/>
   </path>
   </path>
 
 
+  <!-- the cluster test classpath: uses conf.dir for configuration -->
+  <path id="test.cluster.classpath">
+    <path refid="classpath"/>
+    <pathelement location="${test.build.classes}" />
+    <pathelement location="${test.src.dir}"/>
+  </path>
+
   <!-- ====================================================== -->
   <!-- ====================================================== -->
   <!-- Stuff needed by all targets                            -->
   <!-- Stuff needed by all targets                            -->
   <!-- ====================================================== -->
   <!-- ====================================================== -->
@@ -200,11 +209,12 @@
       errorProperty="tests.failed" failureProperty="tests.failed">
       errorProperty="tests.failed" failureProperty="tests.failed">
       <sysproperty key="test.build.data" value="${test.build.data}"/>
       <sysproperty key="test.build.data" value="${test.build.data}"/>
       <sysproperty key="test.src.dir" value="${test.src.dir}"/>
       <sysproperty key="test.src.dir" value="${test.src.dir}"/>
-      <classpath refid="test.classpath"/>
+      <classpath refid="${test.classpath.id}"/>
       <formatter type="plain" />
       <formatter type="plain" />
       <batchtest todir="${test.build.dir}" unless="testcase">
       <batchtest todir="${test.build.dir}" unless="testcase">
         <fileset dir="${test.src.dir}"
         <fileset dir="${test.src.dir}"
-                 includes="**/Test*.java" excludes="**/${test.exclude}.java" />
+	         includes="**/${test.include}.java"
+		 excludes="**/${test.exclude}.java" />
       </batchtest>
       </batchtest>
       <batchtest todir="${test.build.dir}" if="testcase">
       <batchtest todir="${test.build.dir}" if="testcase">
         <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
         <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
@@ -215,6 +225,14 @@
 
 
   </target>   
   </target>   
 
 
+  <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
+  <target name="test-cluster">
+    <antcall target="test">
+      <param name="test.include" value="*"/>
+      <param name="test.classpath.id" value="test.cluster.classpath"/>
+    </antcall>
+  </target>
+
   <target name="nightly" depends="test, tar">
   <target name="nightly" depends="test, tar">
   </target>
   </target>
 
 

+ 10 - 0
src/test/hadoop-site.xml

@@ -0,0 +1,10 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="nutch-conf.xsl"?>
+
+<!-- Values used when running unit tests.  This is mostly empty, so -->
+<!-- that the default values are used, overriding the potentially -->
+<!-- user-edited hadoop-site.xml in the conf/ directory.  -->
+
+<configuration>
+
+</configuration>

+ 10 - 0
src/test/mapred-default.xml

@@ -0,0 +1,10 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="nutch-conf.xsl"?>
+
+<!-- Values used when running unit tests.  This is mostly empty, so -->
+<!-- that the default values are used, overriding the potentially -->
+<!-- user-edited mapred-default.xml in the conf/ directory.  -->
+
+<configuration>
+
+</configuration>

+ 5 - 5
src/test/org/apache/hadoop/dfs/TestDFS.java → src/test/org/apache/hadoop/dfs/ClusterTestDFS.java

@@ -37,7 +37,7 @@ import java.lang.reflect.InvocationTargetException;
 
 
 /**
 /**
  * Test DFS.
  * Test DFS.
- * TestDFS is a JUnit test for DFS using "pseudo multiprocessing" (or 
+ * ClusterTestDFS is a JUnit test for DFS using "pseudo multiprocessing" (or 
  more strictly, pseudo distributed) meaning all daemons run in one process 
  more strictly, pseudo distributed) meaning all daemons run in one process 
  and sockets are used to communicate between daemons.  The test permutes
  and sockets are used to communicate between daemons.  The test permutes
  * various block sizes, number of files, file sizes, and number of
  * various block sizes, number of files, file sizes, and number of
@@ -68,9 +68,9 @@ import java.lang.reflect.InvocationTargetException;
  * (HadoopFS level) test {@link org.apache.hadoop.fs.TestFileSystem}.
  * (HadoopFS level) test {@link org.apache.hadoop.fs.TestFileSystem}.
  * @author Paul Baclace
  * @author Paul Baclace
  */
  */
-public class TestDFS extends TestCase implements FSConstants {
+public class ClusterTestDFS extends TestCase implements FSConstants {
   private static final Logger LOG =
   private static final Logger LOG =
-      LogFormatter.getLogger("org.apache.hadoop.dfs.TestDFS");
+      LogFormatter.getLogger("org.apache.hadoop.dfs.ClusterTestDFS");
 
 
   private static Configuration conf = new Configuration();
   private static Configuration conf = new Configuration();
   private static int BUFFER_SIZE =
   private static int BUFFER_SIZE =
@@ -518,12 +518,12 @@ public class TestDFS extends TestCase implements FSConstants {
   }
   }
 
 
   public static void main(String[] args) throws Exception {
   public static void main(String[] args) throws Exception {
-    String usage = "Usage: TestDFS (no args)";
+    String usage = "Usage: ClusterTestDFS (no args)";
     if (args.length != 0) {
     if (args.length != 0) {
       System.err.println(usage);
       System.err.println(usage);
       System.exit(-1);
       System.exit(-1);
     }
     }
-    String[] testargs = {"org.apache.hadoop.dfs.TestDFS"};
+    String[] testargs = {"org.apache.hadoop.dfs.ClusterTestDFS"};
     junit.textui.TestRunner.main(testargs);
     junit.textui.TestRunner.main(testargs);
   }
   }