
HADOOP-325. Correctly initialize RPC parameter classes, and remove workaround code. Contributed by Owen.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@417569 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting, 19 years ago
parent
commit
3c9bd64a62

+ 4 - 0
CHANGES.txt

@@ -74,6 +74,10 @@ Trunk (unreleased changes)
     duplicated DataNodeInfo.  The former is now deprecated, replaced
     by the latter.  (Konstantin Shvachko via cutting)
 
+18. HADOOP-325.  Fix a problem initializing RPC parameter classes, and
+    remove the workaround used to initialize classes.
+    (omalley via cutting)
+
 
 Release 0.3.2 - 2006-06-09
 

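The whole patch turns on a subtlety of Java class loading: `ClassLoader.loadClass` loads a class without running its static initializers, while the three-argument `Class.forName(name, true, loader)` loads *and* initializes it. A minimal, self-contained sketch of the difference (the `Demo` class is hypothetical, not part of the patch):

```java
// Sketch: loadClass does not run static initializers;
// Class.forName(name, true, loader) does. Demo is hypothetical.
public class InitDemo {
    public static class Demo {
        static { System.out.println("Demo initialized"); }
    }

    public static void main(String[] args) throws Exception {
        ClassLoader loader = InitDemo.class.getClassLoader();

        // Loads and links the class, but prints nothing:
        // initialization is deferred.
        loader.loadClass("InitDemo$Demo");

        // The 'true' argument forces initialization,
        // so "Demo initialized" is printed here.
        Class.forName("InitDemo$Demo", true, loader);
    }
}
```

This is why the manual `new DFSFileInfo()`-style triggers removed below become unnecessary: once the deserialization path initializes classes as it resolves them, their static registration blocks run on their own.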
+ 1 - 1
src/java/org/apache/hadoop/conf/Configuration.java

@@ -242,7 +242,7 @@ public class Configuration {
     if (valueString == null)
       return defaultValue;
     try {
-      return classLoader.loadClass(valueString);
+      return Class.forName(valueString, true, classLoader);
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
     }

+ 0 - 3
src/java/org/apache/hadoop/dfs/DFSClient.java

@@ -53,9 +53,6 @@ class DFSClient implements FSConstants {
     private long defaultBlockSize;
     private short defaultReplication;
     
-    // required for unknown reason to make WritableFactories work distributed
-    static { new DFSFileInfo(); }
-
     /**
      * A map from name -> DFSOutputStream of files that are currently being
      * written by this client.

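The deleted static block is typical of the workaround this patch removes. Hadoop Writable implementations register a construction factory from their own static initializer, so the class has to be initialized, not merely loaded, before `WritableFactories.newInstance` can build one on the receiving end of an RPC. A sketch of that registration pattern (`MyWritable` is hypothetical; `WritableFactories.setFactory` and `WritableFactory` are the real org.apache.hadoop.io API):

```java
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableFactory;

// Hypothetical Writable showing the pattern used by classes like
// DFSFileInfo: the factory is registered from a static initializer,
// which only runs once the class is *initialized*.
public class MyWritable implements Writable {
    static {
        WritableFactories.setFactory(MyWritable.class, new WritableFactory() {
            public Writable newInstance() { return new MyWritable(); }
        });
    }

    public void write(DataOutput out) throws IOException { /* ... */ }
    public void readFields(DataInput in) throws IOException { /* ... */ }
}
```

Before this patch, nothing on the client side initialized DFSFileInfo until code constructed one by hand, hence `static { new DFSFileInfo(); }`; with ObjectWritable now calling `Class.forName(..., true, ...)`, initialization happens as part of deserialization.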
+ 0 - 3
src/java/org/apache/hadoop/dfs/DFSShell.java

@@ -28,9 +28,6 @@ import org.apache.hadoop.util.ToolBase;
  **************************************************/
 public class DFSShell extends ToolBase {
 
-    // required for unknown reason to make WritableFactories work distributed
-    static { new DatanodeInfo(); }
-
     FileSystem fs;
 
     /**

+ 0 - 3
src/java/org/apache/hadoop/dfs/DataNode.java

@@ -67,9 +67,6 @@ public class DataNode implements FSConstants, Runnable {
     //private static long numGigs = Configuration.get().getLong("dfs.datanode.maxgigs", 100);
     //
 
-    // required for unknown reason to make WritableFactories work distributed
-    static { new BlockCommand(); }
-
     /**
      * Util method to build socket addr from string
     */

+ 0 - 6
src/java/org/apache/hadoop/dfs/NameNode.java

@@ -68,12 +68,6 @@ public class NameNode implements ClientProtocol, DatanodeProtocol, FSConstants {
     
     /** only used for testing purposes  */
     private boolean stopRequested = false;
-    // force loading of classes that will be received via RPC
-    // creating an instance will do the static initialization of the class
-    static {
-      new DatanodeRegistration();
-      new Block();
-    }
 
     /** Format a new filesystem.  Destroys any filesystem that may already
      * exist at this location.  **/

+ 7 - 5
src/java/org/apache/hadoop/io/ObjectWritable.java

@@ -172,9 +172,10 @@ public class ObjectWritable implements Writable, Configurable {
     if (declaredClass == null) {
       try {
         declaredClass =
-          Thread.currentThread().getContextClassLoader().loadClass(className);
+          Class.forName(className, true, 
+                        Thread.currentThread().getContextClassLoader());
       } catch (ClassNotFoundException e) {
-        throw new RuntimeException(e.toString());
+        throw new RuntimeException("readObject can't find class", e);
       }
     }    
 
@@ -217,10 +218,11 @@ public class ObjectWritable implements Writable, Configurable {
     } else {                                      // Writable
       Class instanceClass = null;
       try {
-        instanceClass = Thread.currentThread().getContextClassLoader()
-          .loadClass(UTF8.readString(in));
+        instanceClass = 
+          Class.forName(UTF8.readString(in), true, 
+                        Thread.currentThread().getContextClassLoader());
       } catch (ClassNotFoundException e) {
-        throw new RuntimeException(e.toString());
+        throw new RuntimeException("readObject can't find class", e);
       }
       
       Writable writable = WritableFactories.newInstance(instanceClass);

+ 5 - 2
src/java/org/apache/hadoop/io/WritableName.java

@@ -62,9 +62,12 @@ public class WritableName {
     if (writableClass != null)
       return writableClass;
     try {
-      return Thread.currentThread().getContextClassLoader().loadClass(name);
+      return Class.forName(name, true, 
+                           Thread.currentThread().getContextClassLoader());
     } catch (ClassNotFoundException e) {
-      throw new IOException(e.toString());
+      IOException newE = new IOException("WritableName can't load class");
+      newE.initCause(e);
+      throw newE;
     }
   }
 

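One detail worth noting in the WritableName hunk: unlike RuntimeException, IOException had no constructor taking a cause on the JDKs Hadoop targeted at the time (that constructor arrived in Java 6), so the cause must be attached after construction via `initCause`. A small sketch of the idiom (the `wrap` helper is hypothetical):

```java
import java.io.IOException;

// Pre-Java-6 exception chaining: IOException only grew a
// (String, Throwable) constructor in Java 6, so older code attaches
// the cause after construction. The wrap() helper is hypothetical.
public class ChainedIOException {
    static IOException wrap(String message, Throwable cause) {
        IOException wrapped = new IOException(message);
        wrapped.initCause(cause);   // preserves the original stack trace
        return wrapped;
    }

    public static void main(String[] args) {
        try {
            throw wrap("WritableName can't load class",
                       new ClassNotFoundException("com.example.Missing"));
        } catch (IOException e) {
            // Includes "Caused by: ...ClassNotFoundException" in the output.
            e.printStackTrace();
        }
    }
}
```

The same reasoning explains the changed RuntimeException messages in ObjectWritable above: wrapping the original exception, instead of flattening it to `e.toString()`, keeps the full chained stack trace.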
+ 0 - 3
src/java/org/apache/hadoop/mapred/JobClient.java

@@ -39,9 +39,6 @@ public class JobClient extends ToolBase implements MRConstants  {
 
     static long MAX_JOBPROFILE_AGE = 1000 * 2;
 
-    // required for unknown reason to make WritableFactories work distributed
-    static { new JobStatus(); new JobProfile(); new ClusterStatus(); }
-
     /**
      * A NetworkedJob is an implementation of RunningJob.  It holds
      * a JobProfile object to provide some info, and interacts with the

+ 1 - 1
src/java/org/apache/hadoop/mapred/JobInProgress.java

@@ -116,7 +116,7 @@ class JobInProgress {
           try {
             ClassLoader loader =
               new URLClassLoader(new URL[]{ localFs.pathToFile(localJarFile).toURL() });
-            Class inputFormatClass = loader.loadClass(ifClassName);
+            Class inputFormatClass = Class.forName(ifClassName, true, loader);
             inputFormat = (InputFormat)inputFormatClass.newInstance();
           } catch (Exception e) {
             throw new IOException(e.toString());

+ 0 - 3
src/java/org/apache/hadoop/mapred/JobTracker.java

@@ -50,9 +50,6 @@ public class JobTracker implements MRConstants, InterTrackerProtocol, JobSubmiss
       idFormat.setGroupingUsed(false);
     }
 
-    // required for unknown reason to make WritableFactories work distributed
-    static { new TaskTrackerStatus(); }
-
     private int nextJobId = 1;
 
     public static final Log LOG = LogFactory.getLog("org.apache.hadoop.mapred.JobTracker");

+ 0 - 3
src/java/org/apache/hadoop/mapred/TaskTracker.java

@@ -41,9 +41,6 @@ public class TaskTracker
 
     static final int STALE_STATE = 1;
 
-    // required for unknown reason to make WritableFactories work distributed
-    static { new MapTask(); new ReduceTask(); new MapOutputLocation(); }
-
     public static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.mapred.TaskTracker");
 

+ 1 - 1
src/java/org/apache/hadoop/util/RunJar.java

@@ -120,7 +120,7 @@ public class RunJar {
       new URLClassLoader((URL[])classPath.toArray(new URL[0]));
 
     Thread.currentThread().setContextClassLoader(loader);
-    Class mainClass = loader.loadClass(mainClassName);
+    Class mainClass = Class.forName(mainClassName, true, loader);
     Method main = mainClass.getMethod("main", new Class[] {
       Array.newInstance(String.class, 0).getClass()
     });
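
The RunJar change matters because RunJar installs the URLClassLoader as the thread's context class loader and then loads the user's main class through it; deserialization code such as ObjectWritable later resolves job classes via that same context loader. A condensed sketch of the pattern (the jar path and main class name are hypothetical):

```java
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;

// Condensed sketch of the RunJar pattern above; the classpath entry and
// main class name are hypothetical stand-ins.
public class MiniRunJar {
    public static void main(String[] args) throws Exception {
        URL[] classPath = { new URL("file:/tmp/unpacked-job/classes/") };
        ClassLoader loader = new URLClassLoader(classPath);

        // Code that later asks for the context class loader
        // (e.g. ObjectWritable) will now see the job's classes.
        Thread.currentThread().setContextClassLoader(loader);

        // forName(..., true, loader) loads *and* initializes the main class,
        // so any WritableFactories registrations in it run immediately.
        Class mainClass = Class.forName("com.example.JobMain", true, loader);
        Method main = mainClass.getMethod("main",
                                          new Class[] { String[].class });
        main.invoke(null, new Object[] { args });
    }
}
```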