Browse Source

Merge -r 1213907:1213908 and 1213910:1213911 from trunk to branch. FIXES: HADOOP-7810

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1213915 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 13 years ago
parent
commit
771a5e7dce

+ 2 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -89,6 +89,8 @@ Release 0.23.1 - Unreleased
    HADOOP-7914. Remove the duplicated declaration of hadoop-hdfs test-jar in
    hadoop-project/pom.xml.  (szetszwo)
 
+   HADOOP-7810. Move hadoop archive to core from tools. (tucu)
+
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES

+ 0 - 0
hadoop-mapreduce-project/src/tools/org/apache/hadoop/fs/HarFileSystem.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java


+ 0 - 0
hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/TestHarFileSystem.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java


+ 126 - 0
hadoop-tools/hadoop-archives/pom.xml

@@ -0,0 +1,126 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>0.23.1-SNAPSHOT</version>
+    <relativePath>../../hadoop-project</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-archives</artifactId>
+  <version>0.23.1-SNAPSHOT</version>
+  <description>Apache Hadoop Archives</description>
+  <name>Apache Hadoop Archives</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>create-log-dir</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <delete dir="${test.build.data}"/>
+                <mkdir dir="${test.build.data}"/>
+                <mkdir dir="${hadoop.log.dir}"/>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+         <configuration>
+          <archive>
+           <manifest>
+            <mainClass>org.apache.hadoop.tools.HadoopArchives</mainClass>
+           </manifest>
+         </archive>
+        </configuration>
+       </plugin>
+    </plugins>
+  </build>
+</project>

+ 11 - 0
hadoop-mapreduce-project/src/tools/org/apache/hadoop/tools/HadoopArchives.java → hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java

@@ -111,6 +111,14 @@ public class HadoopArchives implements Tool {
     } else {
       this.conf = new JobConf(conf, HadoopArchives.class);
     }
+
+    // This is for test purposes only. Since MR2, unlike Streaming,
+    // it is not possible to add a JAR to the classpath that the tool
+    // will use when running the mapreduce job.
+    String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
+    if (testJar != null) {
+      ((JobConf)conf).setJar(testJar);
+    }
   }
 
   public Configuration getConf() {
@@ -868,9 +876,12 @@ public class HadoopArchives implements Tool {
     return 0;
   }
 
+  static final String TEST_HADOOP_ARCHIVES_JAR_PATH = "test.hadoop.archives.jar";
+
   /** the main functions **/
   public static void main(String[] args) {
     JobConf job = new JobConf(HadoopArchives.class);
+
     HadoopArchives harchives = new HadoopArchives(job);
     int ret = 0;
 

+ 5 - 0
hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/tools/TestHadoopArchives.java → hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.util.JarFinder;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Level;
 
@@ -46,6 +47,9 @@ import org.apache.log4j.Level;
  * test {@link HadoopArchives}
  */
 public class TestHadoopArchives extends TestCase {
+
+  public static final String HADOOP_ARCHIVES_JAR = JarFinder.getJar(HadoopArchives.class);
+
   {
     ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
         ).getLogger().setLevel(Level.OFF);
@@ -136,6 +140,7 @@ public class TestHadoopArchives extends TestCase {
           "*",
           archivePath.toString()
       };
+      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
       final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
       assertEquals(0, ToolRunner.run(har, args));
 

+ 1 - 0
hadoop-tools/pom.xml

@@ -29,6 +29,7 @@
 
   <modules>
     <module>hadoop-streaming</module>
+    <module>hadoop-archives</module>
   </modules>
 
   <build>