Просмотр исходного кода

HADOOP-1310. Fix unchecked warnings in aggregate code.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@536239 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 18 лет назад
Родитель
Commit
e0f9af2aa0

+ 2 - 0
CHANGES.txt

@@ -360,6 +360,8 @@ Trunk (unreleased changes)
      generates it rather than the entire task tracker.
      (Arun C Murthy via cutting)
 
+107. HADOOP-1310.  Fix unchecked warnings in aggregate code.  (tomwhite)
+
 
 Release 0.12.3 - 2007-04-06
 

+ 4 - 4
src/java/org/apache/hadoop/mapred/lib/aggregate/UniqValueCount.java

@@ -29,14 +29,14 @@ import java.util.TreeMap;
  */
 public class UniqValueCount implements ValueAggregator {
 
-  TreeMap uniqItems = null;
+  TreeMap<Object, Object> uniqItems = null;
 
   /**
    * the default constructor
    * 
    */
   public UniqValueCount() {
-    uniqItems = new TreeMap();
+    uniqItems = new TreeMap<Object, Object>();
   }
 
   /**
@@ -70,7 +70,7 @@ public class UniqValueCount implements ValueAggregator {
    * reset the aggregator
    */
   public void reset() {
-    uniqItems = new TreeMap();
+    uniqItems = new TreeMap<Object, Object>();
   }
 
   /**
@@ -80,7 +80,7 @@ public class UniqValueCount implements ValueAggregator {
   public ArrayList getCombinerOutput() {
     Object key = null;
     Iterator iter = uniqItems.keySet().iterator();
-    ArrayList retv = new ArrayList();
+    ArrayList<Object> retv = new ArrayList<Object>();
 
     while (iter.hasNext()) {
       key = iter.next();

+ 1 - 1
src/java/org/apache/hadoop/mapred/lib/aggregate/UserDefinedValueAggregatorDescriptor.java

@@ -48,7 +48,7 @@ public class UserDefinedValueAggregatorDescriptor implements
     Object retv = null;
     try {
       ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
-      Class theFilterClass = Class.forName(className, true, classLoader);
+      Class<?> theFilterClass = Class.forName(className, true, classLoader);
       Constructor meth = theFilterClass.getDeclaredConstructor(argArray);
       meth.setAccessible(true);
       retv = meth.newInstance();

+ 3 - 1
src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java

@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -111,7 +112,8 @@ public class ValueAggregatorJob {
       numOfReducers = Integer.parseInt(args[2]);
     }
 
-    Class theInputFormat = SequenceFileInputFormat.class;
+    Class<? extends InputFormat> theInputFormat =
+      SequenceFileInputFormat.class;
     if (args.length > 3 && args[3].compareToIgnoreCase("textinputformat") == 0) {
       theInputFormat = TextInputFormat.class;
     }

+ 4 - 4
src/java/org/apache/hadoop/mapred/lib/aggregate/ValueHistogram.java

@@ -33,10 +33,10 @@ import org.apache.hadoop.io.Text;
  */
 public class ValueHistogram implements ValueAggregator {
 
-  TreeMap items = null;
+  TreeMap<Object, Object> items = null;
 
   public ValueHistogram() {
-    items = new TreeMap();
+    items = new TreeMap<Object, Object>();
   }
 
   /**
@@ -150,7 +150,7 @@ public class ValueHistogram implements ValueAggregator {
    *  The return value is expected to be used by the reducer.
    */
   public ArrayList getCombinerOutput() {
-    ArrayList retv = new ArrayList();
+    ArrayList<String> retv = new ArrayList<String>();
     Iterator iter = items.entrySet().iterator();
 
     while (iter.hasNext()) {
@@ -174,7 +174,7 @@ public class ValueHistogram implements ValueAggregator {
    * reset the aggregator
    */
   public void reset() {
-    items = new TreeMap();
+    items = new TreeMap<Object, Object>();
   }
 
 }

+ 1 - 1
src/test/org/apache/hadoop/fs/DFSCIOTest.java

@@ -264,7 +264,7 @@ public class DFSCIOTest extends TestCase {
     runIOTest(WriteMapper.class, WRITE_DIR);
   }
   
-  private static void runIOTest( Class mapperClass, 
+  private static void runIOTest( Class<? extends Mapper> mapperClass, 
                                  Path outputDir
                                  ) throws IOException {
     JobConf job = new JobConf(fsConfig, DFSCIOTest.class);

+ 1 - 1
src/test/org/apache/hadoop/fs/TestDFSIO.java

@@ -219,7 +219,7 @@ public class TestDFSIO extends TestCase {
     runIOTest(WriteMapper.class, WRITE_DIR);
   }
   
-  private static void runIOTest( Class mapperClass, 
+  private static void runIOTest( Class<? extends Mapper> mapperClass, 
                                  Path outputDir
                                  ) throws IOException {
     JobConf job = new JobConf(fsConfig, TestDFSIO.class);