瀏覽代碼

Reverting change 399426, which broke distributed operation.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@399461 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 19 年之前
父節點
當前提交
b4799d42e0

+ 1 - 6
CHANGES.txt

@@ -161,17 +161,12 @@ Trunk (unreleased)
 42. HADOOP-184. Re-structure some test code to better support testing
     on a cluster.  (Mahadev Konar via cutting)
 
-43. HADOOP-189.  Fix MapReduce in standalone configuration to
-    correctly handle job jar files that contain a lib directory with
-    nested jar files.  (cutting)
-
-44. HADOOP-191  Add streaming package, Hadoop's first contrib module.
+43. HADOOP-191  Add streaming package, Hadoop's first contrib module.
     This permits folks to easily submit MapReduce jobs whose map and
     reduce functions are implemented by shell commands.  Use
     'bin/hadoop jar build/hadoop-streaming.jar' to get details.
     (Michel Tourn via cutting)
 
-
 Release 0.1.1 - 2006-04-08
 
  1. Added CHANGES.txt, logging all significant changes to Hadoop.  (cutting)

+ 8 - 1
bin/hadoop

@@ -135,7 +135,14 @@ elif [ "$COMMAND" = "tasktracker" ] ; then
 elif [ "$COMMAND" = "job" ] ; then
   CLASS=org.apache.hadoop.mapred.JobClient
 elif [ "$COMMAND" = "jar" ] ; then
-  CLASS=org.apache.hadoop.util.RunJar
+  JAR="$1"
+  shift
+  CLASS=`"$0" org.apache.hadoop.util.PrintJarMainClass "$JAR"`
+  if [ $? != 0 ]; then
+    echo "Error: Could not find main class in jar file $JAR"
+    exit 1
+  fi
+  CLASSPATH=${CLASSPATH}:${JAR}
 else
   CLASS=$COMMAND
 fi

+ 1 - 1
src/java/org/apache/hadoop/conf/Configuration.java

@@ -241,7 +241,7 @@ public class Configuration {
     if (valueString == null)
       return defaultValue;
     try {
-      return classLoader.loadClass(valueString);
+      return Class.forName(valueString);
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
     }

+ 3 - 6
src/java/org/apache/hadoop/io/ObjectWritable.java

@@ -88,8 +88,7 @@ public class ObjectWritable implements Writable, Configurable {
       declaredClass = (Class)PRIMITIVE_NAMES.get(className);
       if (declaredClass == null) {
         try {
-          declaredClass =
-            Thread.currentThread().getContextClassLoader().loadClass(className);
+          declaredClass = Class.forName(className);
         } catch (ClassNotFoundException e) {
           throw new RuntimeException(e.toString());
         }
@@ -171,8 +170,7 @@ public class ObjectWritable implements Writable, Configurable {
     Class declaredClass = (Class)PRIMITIVE_NAMES.get(className);
     if (declaredClass == null) {
       try {
-        declaredClass =
-          Thread.currentThread().getContextClassLoader().loadClass(className);
+        declaredClass = Class.forName(className);
       } catch (ClassNotFoundException e) {
         throw new RuntimeException(e.toString());
       }
@@ -217,8 +215,7 @@ public class ObjectWritable implements Writable, Configurable {
     } else {                                      // Writable
       Class instanceClass = null;
       try {
-        instanceClass = Thread.currentThread().getContextClassLoader()
-          .loadClass(UTF8.readString(in));
+        instanceClass = Class.forName(UTF8.readString(in));
       } catch (ClassNotFoundException e) {
         throw new RuntimeException(e.toString());
       }

+ 1 - 1
src/java/org/apache/hadoop/io/WritableName.java

@@ -62,7 +62,7 @@ public class WritableName {
     if (writableClass != null)
       return writableClass;
     try {
-      return Thread.currentThread().getContextClassLoader().loadClass(name);
+      return Class.forName(name);
     } catch (ClassNotFoundException e) {
       throw new IOException(e.toString());
     }
     }

+ 32 - 2
src/java/org/apache/hadoop/mapred/TaskRunner.java

@@ -18,7 +18,6 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.LogFormatter;
 import org.apache.hadoop.fs.*;
-import org.apache.hadoop.util.*;
 
 import java.io.*;
 import java.util.jar.*;
@@ -79,7 +78,7 @@ abstract class TaskRunner extends Thread {
 
       String jar = conf.getJar();
       if (jar != null) {                      // if jar exists, it into workDir
-        RunJar.unJar(new File(jar), workDir);
+        unJar(new File(jar), workDir);
         File[] libs = new File(workDir, "lib").listFiles();
         if (libs != null) {
           for (int i = 0; i < libs.length; i++) {
@@ -223,6 +222,37 @@ abstract class TaskRunner extends Thread {
     return text;
   }
 
+  private void unJar(File jarFile, File toDir) throws IOException {
+    JarFile jar = new JarFile(jarFile);
+    try {
+      Enumeration entries = jar.entries();
+      while (entries.hasMoreElements()) {
+        JarEntry entry = (JarEntry)entries.nextElement();
+        if (!entry.isDirectory()) {
+          InputStream in = jar.getInputStream(entry);
+          try {
+            File file = new File(toDir, entry.getName());
+            file.getParentFile().mkdirs();
+            OutputStream out = new FileOutputStream(file);
+            try {
+              byte[] buffer = new byte[8192];
+              int i;
+              while ((i = in.read(buffer)) != -1) {
+                out.write(buffer, 0, i);
+              }
+            } finally {
+              out.close();
+            }
+          } finally {
+            in.close();
+          }
+        }
+      }
+    } finally {
+      jar.close();
+    }
+  }
+
   /**
    * Run the child process
    */

+ 0 - 135
src/java/org/apache/hadoop/util/RunJar.java

@@ -1,135 +0,0 @@
-/**
- * Copyright 2006 The Apache Software Foundation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.util;
-
-import java.util.jar.*;
-import java.lang.reflect.*;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.io.*;
-import java.util.*;
-
-import org.apache.hadoop.fs.FileUtil;
-
-/** Run a Hadoop job jar. */
-public class RunJar {
-
-  /** Unpack a jar file into a directory. */
-  public static void unJar(File jarFile, File toDir) throws IOException {
-    JarFile jar = new JarFile(jarFile);
-    try {
-      Enumeration entries = jar.entries();
-      while (entries.hasMoreElements()) {
-        JarEntry entry = (JarEntry)entries.nextElement();
-        if (!entry.isDirectory()) {
-          InputStream in = jar.getInputStream(entry);
-          try {
-            File file = new File(toDir, entry.getName());
-            file.getParentFile().mkdirs();
-            OutputStream out = new FileOutputStream(file);
-            try {
-              byte[] buffer = new byte[8192];
-              int i;
-              while ((i = in.read(buffer)) != -1) {
-                out.write(buffer, 0, i);
-              }
-            } finally {
-              out.close();
-            }
-          } finally {
-            in.close();
-          }
-        }
-      }
-    } finally {
-      jar.close();
-    }
-  }
-
-  /** Run a Hadoop job jar.  If the main class is not in the jar's manifest,
-   * then it must be provided on the command line. */
-  public static void main(String[] args) throws Throwable {
-    String usage = "RunJar jarFile [mainClass] args...";
-
-    if (args.length < 1) {
-      System.err.println(usage);
-      System.exit(-1);
-    }
-
-    int firstArg = 0;
-    String fileName = args[firstArg++];
-    File file = new File(fileName);
-    String mainClassName = null;
-
-    JarFile jarFile = new JarFile(fileName);
-    Manifest manifest = jarFile.getManifest();
-    if (manifest != null) {
-      mainClassName = manifest.getMainAttributes().getValue("Main-Class");
-    }
-    jarFile.close();
-
-    if (mainClassName == null) {
-      if (args.length < 2) {
-        System.err.println(usage);
-        System.exit(-1);
-      }
-      mainClassName = args[firstArg++];
-    }
-    mainClassName = mainClassName.replaceAll("/", ".");
-
-    final File workDir = File.createTempFile("hadoop-unjar","");
-    workDir.delete();
-    workDir.mkdirs();
-
-    Runtime.getRuntime().addShutdownHook(new Thread() {
-        public void run() {
-          try {
-            FileUtil.fullyDelete(workDir);
-          } catch (IOException e) {
-          }
-        }
-      });
-
-    unJar(file, workDir);
-    
-    ArrayList classPath = new ArrayList();
-    File[] libs = new File(workDir, "lib").listFiles();
-    if (libs != null) {
-      for (int i = 0; i < libs.length; i++) {
-        classPath.add(libs[i].toURL());
-      }
-    }
-    classPath.add(new File(workDir, "classes/").toURL());
-    classPath.add(new File(workDir+"/").toURL());
-    ClassLoader loader =
-      new URLClassLoader((URL[])classPath.toArray(new URL[0]));
-
-    Thread.currentThread().setContextClassLoader(loader);
-    Class mainClass = loader.loadClass(mainClassName);
-    Method main = mainClass.getMethod("main", new Class[] {
-      Array.newInstance(String.class, 0).getClass()
-    });
-    String[] newArgs = (String[])Arrays.asList(args)
-      .subList(firstArg, args.length).toArray(new String[0]);
-    try {
-      main.invoke(null, new Object[] { newArgs });
-    } catch (InvocationTargetException e) {
-      throw e.getTargetException();
-    }
-  }
-  
-}