
HADOOP-1190. Fix unchecked warnings in fs and io packages.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@529570 13f79535-47bb-0310-9956-ffa450edef68
Thomas White, 18 years ago
commit 3423aba325
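
All seven files follow the same pattern: raw collection types (HashMap, TreeMap, Map, Iterator) are replaced with parameterized ones, which lets javac verify element types at compile time and removes the explicit downcasts at each use site. A minimal before/after sketch of the idea (the class and method names here are illustrative, not from the patch):

import java.util.HashMap;
import java.util.Map;

public class RawVsGeneric {
  // Before: a raw map compiles only with an unchecked warning and
  // forces a cast at every read.
  @SuppressWarnings("unchecked")
  static String lookupRaw(Map registry, String key) {
    registry.put(key, "value");          // unchecked call
    return (String) registry.get(key);   // caller-side cast
  }

  // After: the parameterized map is checked by javac; no casts.
  static String lookupGeneric(Map<String, String> registry, String key) {
    registry.put(key, "value");
    return registry.get(key);
  }

  public static void main(String[] args) {
    System.out.println(lookupRaw(new HashMap(), "k"));
    System.out.println(lookupGeneric(new HashMap<String, String>(), "k"));
  }
}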

+ 4 - 2
src/java/org/apache/hadoop/fs/InMemoryFileSystem.java

@@ -44,13 +44,15 @@ public class InMemoryFileSystem extends ChecksumFileSystem {
     private Path staticWorkingDir;
   
     //pathToFileAttribs is the final place where a file is put after it is closed
-    private Map <String, FileAttributes> pathToFileAttribs = new HashMap();
+    private Map<String, FileAttributes> pathToFileAttribs =
+      new HashMap<String, FileAttributes>();
   
     //tempFileAttribs is a temp place which is updated while reserving memory for
     //files we are going to create. It is read in the createRaw method and the
     //temp key/value is discarded. If the file makes it to "close", then it
     //ends up being in the pathToFileAttribs map.
-    private Map <String, FileAttributes> tempFileAttribs = new HashMap();
+    private Map<String, FileAttributes> tempFileAttribs =
+      new HashMap<String, FileAttributes>();
   
     public RawInMemoryFileSystem() {
       setConf(new Configuration());

+ 8 - 6
src/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -36,9 +36,11 @@ public class RawLocalFileSystem extends FileSystem {
   static final URI NAME = URI.create("file:///");
   private Path workingDir =
     new Path(System.getProperty("user.dir"));
-  TreeMap sharedLockDataSet = new TreeMap();
-  TreeMap nonsharedLockDataSet = new TreeMap();
-  TreeMap lockObjSet = new TreeMap();
+  TreeMap<File, FileInputStream> sharedLockDataSet =
+    new TreeMap<File, FileInputStream>();
+  TreeMap<File, FileOutputStream> nonsharedLockDataSet =
+    new TreeMap<File, FileOutputStream>();
+  TreeMap<File, FileLock> lockObjSet = new TreeMap<File, FileLock>();
   // by default use copy/delete instead of rename
   boolean useCopyForRename = true;
   
@@ -308,9 +310,9 @@ public class RawLocalFileSystem extends FileSystem {
     FileInputStream sharedLockData;
     FileOutputStream nonsharedLockData;
     synchronized (this) {
-      lockObj = (FileLock) lockObjSet.remove(f);
-      sharedLockData = (FileInputStream) sharedLockDataSet.remove(f);
-      nonsharedLockData = (FileOutputStream) nonsharedLockDataSet.remove(f);
+      lockObj = lockObjSet.remove(f);
+      sharedLockData = sharedLockDataSet.remove(f);
+      nonsharedLockData = nonsharedLockDataSet.remove(f);
     }
     
     if (lockObj == null) {
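
With key and value types declared on the TreeMaps, remove() returns the value type directly, which is why the three downcasts in the lock-release block above could be dropped. A reduced sketch of the same effect (the class and field names are illustrative, not the actual Hadoop fields):

import java.io.File;
import java.io.FileOutputStream;
import java.util.TreeMap;

public class TypedRemove {
  private final TreeMap<File, FileOutputStream> locks =
    new TreeMap<File, FileOutputStream>();

  FileOutputStream release(File f) {
    // remove() is declared to return FileOutputStream, so the
    // (FileOutputStream) downcast a raw TreeMap would force is gone.
    return locks.remove(f);
  }
}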

+ 6 - 5
src/java/org/apache/hadoop/io/ObjectWritable.java

@@ -65,7 +65,7 @@ public class ObjectWritable implements Writable, Configurable {
     writeObject(out, instance, declaredClass, conf);
   }
 
-  private static final Map PRIMITIVE_NAMES = new HashMap();
+  private static final Map<String, Class<?>> PRIMITIVE_NAMES = new HashMap<String, Class<?>>();
   static {
     PRIMITIVE_NAMES.put("boolean", Boolean.TYPE);
     PRIMITIVE_NAMES.put("byte", Byte.TYPE);
@@ -79,7 +79,7 @@ public class ObjectWritable implements Writable, Configurable {
   }
 
   private static class NullInstance extends Configured implements Writable {
-    private Class declaredClass;
+    private Class<?> declaredClass;
     public NullInstance() { super(null); }
     public NullInstance(Class declaredClass, Configuration conf) {
       super(conf);
@@ -87,7 +87,7 @@ public class ObjectWritable implements Writable, Configurable {
     }
     public void readFields(DataInput in) throws IOException {
       String className = UTF8.readString(in);
-      declaredClass = (Class)PRIMITIVE_NAMES.get(className);
+      declaredClass = PRIMITIVE_NAMES.get(className);
       if (declaredClass == null) {
         try {
           declaredClass = getConf().getClassByName(className);
@@ -168,10 +168,11 @@ public class ObjectWritable implements Writable, Configurable {
     
   /** Read a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding. */
+  @SuppressWarnings("unchecked")
   public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf)
     throws IOException {
     String className = UTF8.readString(in);
-    Class declaredClass = (Class)PRIMITIVE_NAMES.get(className);
+    Class<?> declaredClass = PRIMITIVE_NAMES.get(className);
     if (declaredClass == null) {
       try {
         declaredClass = conf.getClassByName(className);
@@ -216,7 +217,7 @@ public class ObjectWritable implements Writable, Configurable {
     } else if (declaredClass == String.class) {        // String
       instance = UTF8.readString(in);
     } else if( declaredClass.isEnum() ) {         // enum
-      instance = Enum.valueOf( declaredClass, UTF8.readString(in) );
+      instance = Enum.valueOf( (Class<? extends Enum>) declaredClass, UTF8.readString(in) );
     } else {                                      // Writable
       Class instanceClass = null;
       try {
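
One spot generics cannot fully check: Enum.valueOf is declared as <T extends Enum<T>> T valueOf(Class<T>, String), but here the class is only known at runtime, so the cast to the raw-bounded Class<? extends Enum> is unavoidable and the method is annotated @SuppressWarnings("unchecked") instead. A standalone sketch of the same situation (the enum and method names are illustrative):

public class RuntimeEnum {
  enum Color { RED, GREEN }

  // declaredClass is only known at runtime, so the cast cannot be
  // checked statically; the warning is suppressed on purpose.
  @SuppressWarnings("unchecked")
  static Object readEnum(Class<?> declaredClass, String name) {
    return Enum.valueOf((Class<? extends Enum>) declaredClass, name);
  }

  public static void main(String[] args) {
    System.out.println(readEnum(Color.class, "RED")); // prints RED
  }
}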

+ 16 - 10
src/java/org/apache/hadoop/io/SequenceFile.java

@@ -530,9 +530,10 @@ public class SequenceFile {
     
     public void write(DataOutput out) throws IOException {
       out.writeInt(this.theMetadata.size());
-      Iterator iter = this.theMetadata.entrySet().iterator();
+      Iterator<Map.Entry<Text, Text>> iter =
+        this.theMetadata.entrySet().iterator();
       while (iter.hasNext()) {
-        Map.Entry<Text, Text> en = (Map.Entry<Text, Text>)iter.next();
+        Map.Entry<Text, Text> en = iter.next();
         en.getKey().write(out);
         en.getValue().write(out);
       }
@@ -556,11 +557,13 @@ public class SequenceFile {
       if (this.theMetadata.size() != other.theMetadata.size()) {
         return false;
       }
-      Iterator iter1 = this.theMetadata.entrySet().iterator();
-      Iterator iter2 = other.theMetadata.entrySet().iterator();
+      Iterator<Map.Entry<Text, Text>> iter1 =
+        this.theMetadata.entrySet().iterator();
+      Iterator<Map.Entry<Text, Text>> iter2 =
+        other.theMetadata.entrySet().iterator();
       while (iter1.hasNext() && iter2.hasNext()) {
-        Map.Entry<Text, Text> en1 = (Map.Entry<Text, Text>)iter1.next();
-        Map.Entry<Text, Text> en2 = (Map.Entry<Text, Text>)iter2.next();
+        Map.Entry<Text, Text> en1 = iter1.next();
+        Map.Entry<Text, Text> en2 = iter2.next();
         if (!en1.getKey().equals(en2.getKey())) {
           return false;
         }
@@ -577,9 +580,10 @@ public class SequenceFile {
     public String toString() {
       StringBuffer sb = new StringBuffer();
       sb.append("size: ").append(this.theMetadata.size()).append("\n");
-      Iterator iter = this.theMetadata.entrySet().iterator();
+      Iterator<Map.Entry<Text, Text>> iter =
+        this.theMetadata.entrySet().iterator();
       while (iter.hasNext()) {
-        Map.Entry<Text, Text> en = (Map.Entry<Text, Text>)iter.next();
+        Map.Entry<Text, Text> en = iter.next();
         sb.append("\t").append(en.getKey().toString()).append("\t").append(en.getValue().toString());
         sb.append("\n");
       }
@@ -2293,7 +2297,8 @@ public class SequenceFile {
       
       //a TreeMap used to store the segments sorted by size (segment offset and
       //segment path name is used to break ties between segments of same sizes)
-      private Map <SegmentDescriptor, Void> sortedSegmentSizes = new TreeMap();
+      private Map<SegmentDescriptor, Void> sortedSegmentSizes =
+        new TreeMap<SegmentDescriptor, Void>();
             
       public void put(SegmentDescriptor stream) throws IOException {
         if (size() == 0) {
@@ -2381,7 +2386,8 @@ public class SequenceFile {
         do {
           //get the factor for this pass of merge
           factor = getPassFactor(passNo, numSegments);
-          List <SegmentDescriptor> segmentsToMerge = new ArrayList();
+          List<SegmentDescriptor> segmentsToMerge =
+            new ArrayList<SegmentDescriptor>();
           int segmentsConsidered = 0;
           int numSegmentsToConsider = factor;
           while (true) {
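
Since Java 5 the typed entry-set iteration above can equivalently be written as an enhanced for loop; the patch keeps explicit iterators, matching the surrounding style. An illustrative equivalent of the toString() hunk (String stands in for Hadoop's Text so the sketch is self-contained):

import java.util.Map;
import java.util.TreeMap;

public class TypedIteration {
  private final Map<String, String> theMetadata =
    new TreeMap<String, String>();

  public String dump() {
    StringBuffer sb = new StringBuffer();
    // Typed entry set: no cast on the entries, and the enhanced
    // for loop hides the Iterator entirely.
    for (Map.Entry<String, String> en : theMetadata.entrySet()) {
      sb.append("\t").append(en.getKey()).append("\t").append(en.getValue());
      sb.append("\n");
    }
    return sb.toString();
  }
}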

+ 4 - 2
src/java/org/apache/hadoop/io/WritableComparator.java

@@ -32,11 +32,12 @@ import java.util.*;
  */
 public class WritableComparator implements Comparator {
 
-  private static HashMap comparators = new HashMap(); // registry
+  private static HashMap<Class, WritableComparator> comparators =
+    new HashMap<Class, WritableComparator>(); // registry
 
   /** Get a comparator for a {@link WritableComparable} implementation. */
   public static synchronized WritableComparator get(Class c) {
-    WritableComparator comparator = (WritableComparator)comparators.get(c);
+    WritableComparator comparator = comparators.get(c);
     if (comparator == null)
       comparator = new WritableComparator(c);
     return comparator;
@@ -103,6 +104,7 @@ public class WritableComparator implements Comparator {
    *
    * <p> The default implementation uses the natural ordering, calling {@link
    * Comparable#compareTo(Object)}. */
+  @SuppressWarnings("unchecked")
   public int compare(WritableComparable a, WritableComparable b) {
     return a.compareTo(b);
   }
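
The registry half of this change removes the cast in get(); compare() still needs @SuppressWarnings("unchecked") because WritableComparable extended the raw Comparable at this point, making a.compareTo(b) an unchecked call. A self-contained sketch of the typed registry (DummyComparator and the fallback behavior are illustrative simplifications):

import java.util.HashMap;

public class ComparatorRegistry {
  static class DummyComparator {}

  // Typed registry: get() no longer needs a cast on the lookup.
  private static HashMap<Class, DummyComparator> comparators =
    new HashMap<Class, DummyComparator>();

  /** Register a comparator for a class. */
  public static synchronized void define(Class c, DummyComparator comparator) {
    comparators.put(c, comparator);
  }

  /** Fall back to a fresh instance when none is registered. */
  public static synchronized DummyComparator get(Class c) {
    DummyComparator comparator = comparators.get(c);
    return comparator != null ? comparator : new DummyComparator();
  }
}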

+ 3 - 2
src/java/org/apache/hadoop/io/WritableFactories.java

@@ -25,7 +25,8 @@ import java.util.HashMap;
 /** Factories for non-public writables.  Defining a factory permits {@link
  * ObjectWritable} to be able to construct instances of non-public classes. */
 public class WritableFactories {
-  private static final HashMap CLASS_TO_FACTORY = new HashMap();
+  private static final HashMap<Class, WritableFactory> CLASS_TO_FACTORY =
+    new HashMap<Class, WritableFactory>();
 
   private WritableFactories() {}                  // singleton
 
@@ -36,7 +37,7 @@ public class WritableFactories {
 
   /** Define a factory for a class. */
   public static synchronized WritableFactory getFactory(Class c) {
-    return (WritableFactory)CLASS_TO_FACTORY.get(c);
+    return CLASS_TO_FACTORY.get(c);
   }
 
   /** Create a new instance of a class with a defined factory. */

+ 6 - 4
src/java/org/apache/hadoop/io/WritableName.java

@@ -28,8 +28,10 @@ import org.apache.hadoop.conf.Configuration;
  * @author Doug Cutting
  */
 public class WritableName {
-  private static HashMap NAME_TO_CLASS = new HashMap();
-  private static HashMap CLASS_TO_NAME = new HashMap();
+  private static HashMap<String, Class> NAME_TO_CLASS =
+    new HashMap<String, Class>();
+  private static HashMap<Class, String> CLASS_TO_NAME =
+    new HashMap<Class, String>();
 
   static {                                        // define important types
     WritableName.setName(NullWritable.class, "null");
@@ -54,7 +56,7 @@ public class WritableName {
 
   /** Return the name for a class.  Default is {@link Class#getName()}. */
   public static synchronized String getName(Class writableClass) {
-    String name = (String)CLASS_TO_NAME.get(writableClass);
+    String name = CLASS_TO_NAME.get(writableClass);
     if (name != null)
       return name;
     return writableClass.getName();
@@ -64,7 +66,7 @@ public class WritableName {
   public static synchronized Class getClass(String name,
                                             Configuration conf
                                             ) throws IOException {
-    Class writableClass = (Class)NAME_TO_CLASS.get(name);
+    Class writableClass = NAME_TO_CLASS.get(name);
     if (writableClass != null)
       return writableClass;
     try {
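
WritableName keeps one map per lookup direction, so both name-to-class and class-to-name resolution stay O(1); the generified declarations just make those value types explicit. A reduced, illustrative sketch of the bidirectional registry (the method names follow the original, the bodies are simplified):

import java.util.HashMap;

public class NameRegistry {
  private static HashMap<String, Class> NAME_TO_CLASS =
    new HashMap<String, Class>();
  private static HashMap<Class, String> CLASS_TO_NAME =
    new HashMap<Class, String>();

  public static synchronized void setName(Class writableClass, String name) {
    CLASS_TO_NAME.put(writableClass, name);
    NAME_TO_CLASS.put(name, writableClass);
  }

  /** Default to Class#getName() when no name was registered. */
  public static synchronized String getName(Class writableClass) {
    String name = CLASS_TO_NAME.get(writableClass); // no (String) cast
    return name != null ? name : writableClass.getName();
  }
}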