
HADOOP-7810. move hadoop archive to core from tools. (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1213907 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 13 years ago
parent commit 0201be46c2

+ 2 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -136,6 +136,8 @@ Trunk (unreleased changes)
 
     HADOOP-7913 Fix bug in ProtoBufRpcEngine  (sanjay)
 
+    HADOOP-7810. move hadoop archive to core from tools. (tucu)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)

+ 0 - 0
hadoop-mapreduce-project/src/tools/org/apache/hadoop/fs/HarFileSystem.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java


+ 0 - 0
hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/TestHarFileSystem.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java


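The two moves above relocate HarFileSystem (the read-only FileSystem implementation over Hadoop archives) and its test from the MapReduce project into hadoop-common without changing them. For orientation, a minimal sketch of how client code reaches an archive through a har:// path; the class name and paths are illustrative and not part of this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HarListSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Illustrative path: a directory inside an existing archive. "har" paths
    // resolve to HarFileSystem (read-only) layered over the default filesystem.
    Path dirInHar = new Path("har:///user/me/archives/foo.har/dir");
    FileSystem fs = dirInHar.getFileSystem(conf);
    for (FileStatus st : fs.listStatus(dirInHar)) {
      System.out.println(st.getPath() + "\t" + st.getLen());
    }
  }
}
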
+ 11 - 0
hadoop-mapreduce-project/src/tools/org/apache/hadoop/tools/HadoopArchives.java → hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java

@@ -111,6 +111,14 @@ public class HadoopArchives implements Tool {
     } else {
       this.conf = new JobConf(conf, HadoopArchives.class);
     }
+
+    // This is for test purposes: since MR2, and unlike Streaming, it is
+    // not possible here to add a JAR to the classpath that the tool
+    // will use when running the mapreduce job.
+    String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
+    if (testJar != null) {
+      ((JobConf)conf).setJar(testJar);
+    }
   }
 
   public Configuration getConf() {
@@ -868,9 +876,12 @@ public class HadoopArchives implements Tool {
     return 0;
   }
 
+  static final String TEST_HADOOP_ARCHIVES_JAR_PATH = "test.hadoop.archives.jar";
+
   /** the main functions **/
   public static void main(String[] args) {
     JobConf job = new JobConf(HadoopArchives.class);
+
     HadoopArchives harchives = new HadoopArchives(job);
     int ret = 0;
 

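The hunk above introduces a test-only hook: if the test.hadoop.archives.jar system property is set, its value is installed as the job jar, because under MR2 the tool's jar cannot be added to the job classpath the way Streaming does it. A hedged sketch of how a test exercises the hook, mirroring the TestHadoopArchives change below; the class name and jar path are illustrative:

package org.apache.hadoop.tools;  // same package, so the package-private constant is visible

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ToolRunner;

public class HarToolJarSketch {
  public static void main(String[] args) throws Exception {
    // Point the tool at a pre-built jar before its configuration is set.
    System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH,
        "/tmp/hadoop-archives-tests.jar");          // illustrative jar path
    JobConf job = new JobConf(HadoopArchives.class);
    HadoopArchives har = new HadoopArchives(job);   // the tool installs the jar when its conf is set
    int ret = ToolRunner.run(har, new String[] {
        "-archiveName", "foo.har", "-p", "/user/me/input", "*", "/user/me/archives" });
    System.exit(ret);
  }
}
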
+ 5 - 0
hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/tools/TestHadoopArchives.java → hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.util.JarFinder;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Level;
 
@@ -46,6 +47,9 @@ import org.apache.log4j.Level;
  * test {@link HadoopArchives}
  */
 public class TestHadoopArchives extends TestCase {
+
+  public static final String HADOOP_ARCHIVES_JAR = JarFinder.getJar(HadoopArchives.class);
+
   {
     ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
         ).getLogger().setLevel(Level.OFF);
@@ -136,6 +140,7 @@ public class TestHadoopArchives extends TestCase {
           "*",
           archivePath.toString()
       };
+      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
       final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
       assertEquals(0, ToolRunner.run(har, args));
 

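JarFinder.getJar, imported above, returns the path of a jar containing the given class, building one on the fly when the class is only on the classpath as .class files; the test resolves the archives jar once and hands it to the tool through the new system property. A brief sketch of the same idiom (the class name and printed output are illustrative, not from this commit):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.tools.HadoopArchives;
import org.apache.hadoop.util.JarFinder;

public class JarFinderSketch {
  public static void main(String[] args) {
    // Resolve (or build on the fly) a jar that contains HadoopArchives.class.
    String toolJar = JarFinder.getJar(HadoopArchives.class);
    JobConf job = new JobConf(HadoopArchives.class);
    job.setJar(toolJar);  // same effect as the TEST_HADOOP_ARCHIVES_JAR_PATH hook
    System.out.println("job jar: " + job.getJar());
  }
}
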
+ 1 - 0
hadoop-tools/pom.xml

@@ -29,6 +29,7 @@
 
   <modules>
     <module>hadoop-streaming</module>
+    <module>hadoop-archives</module>
   </modules>
 
   <build>