
HADOOP-1190. Fix unchecked warnings in main Hadoop code.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@532046 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 18 years ago
parent
commit
8351e18d2a

+ 3 - 0
CHANGES.txt

@@ -245,6 +245,9 @@ Trunk (unreleased changes)
 73. HADOOP-1271.  Fix StreamBaseRecordReader to be able to log record 
     data that's not UTF-8.  (Arun C Murthy via tomwhite)
 
+74. HADOOP-1190.  Fix unchecked warnings in main Hadoop code.  
+    (tomwhite)
+
 
 Release 0.12.3 - 2007-04-06
 

+ 22 - 4
build.xml

@@ -68,6 +68,7 @@
   <property name="javac.deprecation" value="off"/>
   <property name="javac.version" value="1.5"/>
   <property name="javac.args" value=""/>
+  <property name="javac.args.warnings" value="-Xlint:unchecked"/>
 
   <!-- the normal classpath -->
   <path id="classpath">
@@ -205,10 +206,27 @@
      webxml="${build.webapps}/datanode/WEB-INF/web.xml">
     </jsp-compile>
 
+    <!-- Compile Java files (excluding JSPs) checking warnings -->
     <javac 
      encoding="${build.encoding}" 
      srcdir="${src.dir};${build.src}"
      includes="org/apache/hadoop/**/*.java"
+     excludes="org/apache/hadoop/**/*_jsp.java"
+     destdir="${build.classes}"
+     debug="${javac.debug}"
+     optimize="${javac.optimize}"
+     target="${javac.version}"
+     source="${javac.version}"
+     deprecation="${javac.deprecation}">
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
+      <classpath refid="classpath"/>
+    </javac>   
+  	
+    <!-- Compile JSPs without checking warnings -->
+    <javac 
+     encoding="${build.encoding}" 
+     srcdir="${src.dir};${build.src}"
+     includes="org/apache/hadoop/**/*_jsp.java"
      destdir="${build.classes}"
      debug="${javac.debug}"
      optimize="${javac.optimize}"
@@ -217,7 +235,7 @@
      deprecation="${javac.deprecation}">
       <compilerarg line="${javac.args}" />
       <classpath refid="classpath"/>
-    </javac>    
+    </javac>
     
     <copy todir="${build.classes}">
       <fileset 
@@ -300,7 +318,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}" />
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="classpath"/>
     </javac>    
   </target>
@@ -394,7 +412,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}" />
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="test.classpath"/>
     </javac> 
     <javac
@@ -407,7 +425,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}" />
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="test.classpath"/>
     </javac>                                 
     <delete file="${test.build.testjar}/testjob.jar"/> 
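
Note: the build.xml change wires the warning check in through a new javac.args.warnings property, appended via <compilerarg line="..."/> so that -Xlint:unchecked reaches every hand-written compilation unit, while JSP-generated sources are compiled in a separate <javac> task without it (generated code cannot be fixed by hand). A minimal sketch, not from the Hadoop source, of the kind of code -Xlint:unchecked flags and its generified fix:

    import java.util.ArrayList;
    import java.util.List;

    public class UncheckedDemo {
      static String firstRaw() {
        List names = new ArrayList();     // raw type
        names.add("alpha");               // warning: unchecked call to add(E)
        return (String) names.get(0);     // cast required with raw types
      }

      static String firstTyped() {
        List<String> names = new ArrayList<String>();  // Java 5 style, as in this commit
        names.add("alpha");                            // checked at compile time
        return names.get(0);                           // no cast needed
      }

      public static void main(String[] args) {
        System.out.println(firstRaw() + " " + firstTyped());
      }
    }

Compiling with "javac -Xlint:unchecked UncheckedDemo.java" reports the raw add() call in firstRaw() and nothing in firstTyped().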

+ 4 - 8
src/test/org/apache/hadoop/conf/TestConfiguration.java

@@ -22,13 +22,11 @@ import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Iterator;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobConf;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.fs.Path;
+
 
 public class TestConfiguration extends TestCase {
 
@@ -73,9 +71,7 @@ public class TestConfiguration extends TestCase {
     Path fileResource = new Path(CONFIG);
     conf.addDefaultResource(fileResource);
 
-    Iterator it = props.iterator();
-    while(it.hasNext()) {
-      Prop p = (Prop)it.next();
+    for (Prop p : props) {
       System.out.println("p=" + p.name);
       String gotVal = conf.get(p.name);
       String gotRawVal = (String)conf.getObject(p.name);
@@ -106,7 +102,7 @@ public class TestConfiguration extends TestCase {
   }
 
   final String UNSPEC = null;
-  ArrayList props = new ArrayList();
+  ArrayList<Prop> props = new ArrayList<Prop>();
 
   void declareProperty(String name, String val, String expectEval)
     throws IOException {
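
The TestConfiguration hunk shows the recurring two-step pattern of this commit: parameterize the collection, then replace the Iterator-plus-cast idiom with the Java 5 enhanced for loop. A standalone sketch (hypothetical names, not the test's own):

    import java.util.ArrayList;
    import java.util.List;

    public class ForEachDemo {
      public static void main(String[] args) {
        List<String> props = new ArrayList<String>();
        props.add("io.sort.mb");
        props.add("dfs.replication");

        // Pre-generics idiom the commit removes:
        //   Iterator it = props.iterator();
        //   while (it.hasNext()) { String p = (String) it.next(); ... }

        // Generified replacement: no explicit Iterator, no cast.
        for (String p : props) {
          System.out.println("p=" + p);
        }
      }
    }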

+ 2 - 2
src/test/org/apache/hadoop/dfs/ClusterTestDFS.java

@@ -226,7 +226,7 @@ public class ClusterTestDFS extends TestCase implements FSConstants {
       //
       //        start some DataNodes
       //
-      ArrayList listOfDataNodeDaemons = new ArrayList();
+      ArrayList<DataNode> listOfDataNodeDaemons = new ArrayList<DataNode>();
       conf.set("fs.default.name", nameNodeSocketAddr);
       for (int i = 0; i < initialDNcount; i++) {
         // uniquely config real fs path for data storage for this datanode
@@ -253,7 +253,7 @@ public class ClusterTestDFS extends TestCase implements FSConstants {
         //
         //           write nBytes of data using randomDataGenerator to numFiles
         //
-        ArrayList testfilesList = new ArrayList();
+        ArrayList<UTF8> testfilesList = new ArrayList<UTF8>();
         byte[] buffer = new byte[bufferSize];
         UTF8 testFileName = null;
         for (int iFileNumber = 0; iFileNumber < numFiles; iFileNumber++) {

+ 2 - 2
src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java

@@ -66,7 +66,7 @@ public class ClusterTestDFSNamespaceLogging extends TestCase implements FSConsta
   /** DFS client, datanodes, and namenode
    */
   DFSClient dfsClient;
-  ArrayList dataNodeDaemons = new ArrayList();
+  ArrayList<DataNode> dataNodeDaemons = new ArrayList<DataNode>();
   NameNode nameNodeDaemon;
   
   /** Log header length
@@ -398,7 +398,7 @@ public class ClusterTestDFSNamespaceLogging extends TestCase implements FSConsta
     msg("begin shutdown of all datanode daemons");
 
     for (int i = 0; i < dataNodeDaemons.size(); i++) {
-      DataNode dataNode = (DataNode) dataNodeDaemons.get(i);
+      DataNode dataNode = dataNodeDaemons.get(i);
       try {
         dataNode.shutdown();
       } catch (Exception e) {

+ 1 - 1
src/test/org/apache/hadoop/dfs/TestDecommission.java

@@ -164,7 +164,7 @@ public class TestDecommission extends TestCase {
     System.out.println("Decommissioning node: " + nodename);
 
     // write nodename into the exclude file.
-    ArrayList<String> nodes = (ArrayList<String>)decommissionedNodes.clone();
+    ArrayList<String> nodes = new ArrayList<String>(decommissionedNodes);
     nodes.add(nodename);
     writeConfigFile(localFileSys, excludeFile, nodes);
     dfs.refreshNodes();
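
The TestDecommission fix is slightly different: ArrayList.clone() returns Object, and casting it to ArrayList<String> is an unchecked cast because the type argument is erased at runtime, so no cast can silence the warning. The copy constructor yields a correctly typed copy with no cast at all. An illustrative sketch (hypothetical values, not from the test):

    import java.util.ArrayList;

    public class CloneVsCopy {
      public static void main(String[] args) {
        ArrayList<String> excluded = new ArrayList<String>();
        excluded.add("host1:50010");

        // Unchecked-warning version the commit removes:
        //   ArrayList<String> copy = (ArrayList<String>) excluded.clone();

        // Copy constructor: same shallow copy, fully typed, no warning.
        ArrayList<String> copy = new ArrayList<String>(excluded);
        copy.add("host2:50010");

        System.out.println(excluded);  // [host1:50010]
        System.out.println(copy);      // [host1:50010, host2:50010]
      }
    }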

+ 3 - 3
src/test/org/apache/hadoop/fs/DistributedFSCheck.java

@@ -285,7 +285,7 @@ public class DistributedFSCheck extends TestCase {
       }
     }
     
-    Vector resultLines = new Vector();
+    Vector<String> resultLines = new Vector<String>();
     resultLines.add( "----- DistributedFSCheck ----- : ");
     resultLines.add( "               Date & time: " + new Date(System.currentTimeMillis()));
     resultLines.add( "    Total number of blocks: " + blocks);
@@ -293,7 +293,7 @@ public class DistributedFSCheck extends TestCase {
     resultLines.add( "Number of corrupted blocks: " + nrBadBlocks);
     
     int nrBadFilesPos = resultLines.size();
-    TreeSet badFiles = new TreeSet();
+    TreeSet<String> badFiles = new TreeSet<String>();
     long nrBadFiles = 0;
     if (nrBadBlocks > 0) {
       resultLines.add("");
@@ -321,7 +321,7 @@ public class DistributedFSCheck extends TestCase {
                                       new FileOutputStream(
                                                            new File(resFileName), true)); 
     for(int i = 0; i < resultLines.size(); i++) {
-      String cur = (String)resultLines.get(i);
+      String cur = resultLines.get(i);
       LOG.info(cur);
       res.println(cur);
     }

+ 7 - 5
src/test/org/apache/hadoop/io/TestSequenceFile.java

@@ -230,7 +230,8 @@ public class TestSequenceFile extends TestCase {
     throws IOException {
     LOG.info("sorting " + count + " records in memory for debug");
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
-    SortedMap map = new TreeMap();
+    SortedMap<RandomDatum, RandomDatum> map =
+      new TreeMap<RandomDatum, RandomDatum>();
     for (int i = 0; i < count; i++) {
       generator.next();
       RandomDatum key = generator.getKey();
@@ -241,13 +242,14 @@ public class TestSequenceFile extends TestCase {
     LOG.debug("checking order of " + count + " records");
     RandomDatum k = new RandomDatum();
     RandomDatum v = new RandomDatum();
-    Iterator iterator = map.entrySet().iterator();
+    Iterator<Map.Entry<RandomDatum, RandomDatum>> iterator =
+      map.entrySet().iterator();
     SequenceFile.Reader reader =
       new SequenceFile.Reader(fs, file.suffix(".sorted"), conf);
     for (int i = 0; i < count; i++) {
-      Map.Entry entry = (Map.Entry)iterator.next();
-      RandomDatum key = (RandomDatum)entry.getKey();
-      RandomDatum value = (RandomDatum)entry.getValue();
+      Map.Entry<RandomDatum, RandomDatum> entry = iterator.next();
+      RandomDatum key = entry.getKey();
+      RandomDatum value = entry.getValue();
 
       reader.next(k, v);
 

+ 1 - 1
src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java

@@ -110,7 +110,7 @@ public class TestMiniMRWithDFS extends TestCase {
                                            String[] taskDirs) {
     mr.waitUntilIdle();
     int trackers = mr.getNumTaskTrackers();
-    List neededDirs = new ArrayList(Arrays.asList(taskDirs));
+    List<String> neededDirs = new ArrayList<String>(Arrays.asList(taskDirs));
     boolean[] found = new boolean[taskDirs.length];
     for(int i=0; i < trackers; ++i) {
       int numNotDel = 0;

+ 7 - 7
src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java

@@ -160,30 +160,30 @@ public class TestJobControl extends junit.framework.TestCase {
     cleanData(fs, outdir_3);
     cleanData(fs, outdir_4);
 
-    ArrayList dependingJobs = null;
+    ArrayList<Job> dependingJobs = null;
 
-    ArrayList inPaths_1 = new ArrayList();
+    ArrayList<Path> inPaths_1 = new ArrayList<Path>();
     inPaths_1.add(indir);
     JobConf jobConf_1 = createCopyJob(inPaths_1, outdir_1);
     Job job_1 = new Job(jobConf_1, dependingJobs);
-    ArrayList inPaths_2 = new ArrayList();
+    ArrayList<Path> inPaths_2 = new ArrayList<Path>();
     inPaths_2.add(indir);
     JobConf jobConf_2 = createCopyJob(inPaths_2, outdir_2);
     Job job_2 = new Job(jobConf_2, dependingJobs);
 
-    ArrayList inPaths_3 = new ArrayList();
+    ArrayList<Path> inPaths_3 = new ArrayList<Path>();
     inPaths_3.add(outdir_1);
     inPaths_3.add(outdir_2);
     JobConf jobConf_3 = createCopyJob(inPaths_3, outdir_3);
-    dependingJobs = new ArrayList();
+    dependingJobs = new ArrayList<Job>();
     dependingJobs.add(job_1);
     dependingJobs.add(job_2);
     Job job_3 = new Job(jobConf_3, dependingJobs);
 
-    ArrayList inPaths_4 = new ArrayList();
+    ArrayList<Path> inPaths_4 = new ArrayList<Path>();
     inPaths_4.add(outdir_3);
     JobConf jobConf_4 = createCopyJob(inPaths_4, outdir_4);
-    dependingJobs = new ArrayList();
+    dependingJobs = new ArrayList<Job>();
     dependingJobs.add(job_3);
     Job job_4 = new Job(jobConf_4, dependingJobs);