Browse Source

svn merge -c 1582015 from trunk for HADOOP-10437. Fix the javac warnings in the conf and the util package.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2.4@1582018 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 11 years ago
parent
commit
07de00eb98
15 changed files with 35 additions and 41 deletions
  1. 3 0
      hadoop-common-project/hadoop-common/CHANGES.txt
  2. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  3. 2 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
  4. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java
  5. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
  6. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
  7. 0 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
  8. 4 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
  9. 2 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
  10. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
  11. 2 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java
  12. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
  13. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
  14. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
  15. 2 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -129,6 +129,9 @@ Release 2.4.0 - UNRELEASED
     HADOOP-10440. HarFsInputStream.read(byte[]) updates position incorrectly.
     (guodongdong via szetszwo)
 
+    HADOOP-10437. Fix the javac warnings in the conf and the util package.
+    (szetszwo)
+
   BREAKDOWN OF HADOOP-10184 SUBTASKS AND RELATED JIRAS
 
     HADOOP-10185. FileSystem API for ACLs. (cnauroth)

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -2251,13 +2251,13 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         root = (Element)resource;
       }
 
-      if (doc == null && root == null) {
-        if (quiet)
-          return null;
-        throw new RuntimeException(resource + " not found");
-      }
-
       if (root == null) {
+        if (doc == null) {
+          if (quiet) {
+            return null;
+          }
+          throw new RuntimeException(resource + " not found");
+        }
         root = doc.getDocumentElement();
       }
       Properties toAddTo = properties;

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java

@@ -131,15 +131,14 @@ public class ReconfigurationServlet extends HttpServlet {
 
   @SuppressWarnings("unchecked")
   private Enumeration<String> getParams(HttpServletRequest req) {
-    return (Enumeration<String>) req.getParameterNames();
+    return req.getParameterNames();
   }
 
   /**
   * Apply configuration changes after admin has approved them.
    */
   private void applyChanges(PrintWriter out, Reconfigurable reconf,
-                            HttpServletRequest req) 
-    throws IOException, ReconfigurationException {
+      HttpServletRequest req) throws ReconfigurationException {
     Configuration oldConf = reconf.getConf();
     Configuration newConf = new Configuration();
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java

@@ -47,18 +47,18 @@ public class WritableName {
 
   /** Set the name that a class should be known as to something other than the
    * class name. */
-  public static synchronized void setName(Class writableClass, String name) {
+  public static synchronized void setName(Class<?> writableClass, String name) {
     CLASS_TO_NAME.put(writableClass, name);
     NAME_TO_CLASS.put(name, writableClass);
   }
 
   /** Add an alternate name for a class. */
-  public static synchronized void addName(Class writableClass, String name) {
+  public static synchronized void addName(Class<?> writableClass, String name) {
     NAME_TO_CLASS.put(name, writableClass);
   }
 
   /** Return the name for a class.  Default is {@link Class#getName()}. */
-  public static synchronized String getName(Class writableClass) {
+  public static synchronized String getName(Class<?> writableClass) {
     String name = CLASS_TO_NAME.get(writableClass);
     if (name != null)
       return name;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.util;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -26,6 +25,7 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -68,7 +68,7 @@ public class AsyncDiskService {
    * 
    * @param volumes The roots of the file system volumes.
    */
-  public AsyncDiskService(String[] volumes) throws IOException {
+  public AsyncDiskService(String[] volumes) {
     
     threadFactory = new ThreadFactory() {
       @Override

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java

@@ -36,13 +36,13 @@ public class ClassUtil {
    * @return a jar file that contains the class, or null.
    * @throws IOException
    */
-  public static String findContainingJar(Class clazz) {
+  public static String findContainingJar(Class<?> clazz) {
     ClassLoader loader = clazz.getClassLoader();
     String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
     try {
-      for (Enumeration itr = loader.getResources(classFile);
+      for(final Enumeration<URL> itr = loader.getResources(classFile);
           itr.hasMoreElements();) {
-        URL url = (URL) itr.nextElement();
+        final URL url = itr.nextElement();
         if ("jar".equals(url.getProtocol())) {
           String toReturn = url.getPath();
           if (toReturn.startsWith("file:")) {

+ 0 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java

@@ -27,7 +27,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.Shell;
 
 /**
  * Class that provides utility functions for checking disk problem
@@ -35,9 +34,6 @@ import org.apache.hadoop.util.Shell;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskChecker {
-
-  private static final long SHELL_TIMEOUT = 10 * 1000;
-
   public static class DiskErrorException extends IOException {
     public DiskErrorException(String msg) {
       super(msg);

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java

@@ -25,7 +25,6 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.cli.CommandLine;
@@ -397,7 +396,8 @@ public class GenericOptionsParser {
         if (!localFs.exists(path)) {
           throw new FileNotFoundException("File " + tmp + " does not exist.");
         }
-        finalPath = path.makeQualified(localFs).toString();
+        finalPath = path.makeQualified(localFs.getUri(),
+            localFs.getWorkingDirectory()).toString();
       }
       else {
         // check if the file exists in this file system
@@ -408,7 +408,8 @@ public class GenericOptionsParser {
         if (!fs.exists(path)) {
           throw new FileNotFoundException("File " + tmp + " does not exist.");
         }
-        finalPath = path.makeQualified(fs).toString();
+        finalPath = path.makeQualified(fs.getUri(),
+            fs.getWorkingDirectory()).toString();
       }
       finalArr[i] = finalPath;
     }

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java

@@ -169,9 +169,8 @@ public class HostsFileReader {
     this.excludesFile = excludesFile;
   }
 
-  public synchronized void updateFileNames(String includesFile, 
-                                           String excludesFile) 
-                                           throws IOException {
+  public synchronized void updateFileNames(String includesFile,
+      String excludesFile) {
     setIncludesFile(includesFile);
     setExcludesFile(excludesFile);
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java

@@ -245,7 +245,7 @@ public class LineReader implements Closeable {
       }
     } while (newlineLength == 0 && bytesConsumed < maxBytesToConsume);
 
-    if (bytesConsumed > (long)Integer.MAX_VALUE) {
+    if (bytesConsumed > Integer.MAX_VALUE) {
       throw new IOException("Too many bytes before newline: " + bytesConsumed);
     }
     return (int)bytesConsumed;
@@ -343,7 +343,7 @@ public class LineReader implements Closeable {
       }
     } while (delPosn < recordDelimiterBytes.length 
         && bytesConsumed < maxBytesToConsume);
-    if (bytesConsumed > (long) Integer.MAX_VALUE) {
+    if (bytesConsumed > Integer.MAX_VALUE) {
       throw new IOException("Too many bytes before delimiter: " + bytesConsumed);
     }
     return (int) bytesConsumed; 

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java

@@ -99,7 +99,8 @@ public class ProgramDriver {
    * @throws NoSuchMethodException 
    * @throws SecurityException 
    */
-  public void addClass (String name, Class mainClass, String description) throws Throwable {
+  public void addClass(String name, Class<?> mainClass, String description)
+      throws Throwable {
     programs.put(name , new ProgramDescription(mainClass, description));
   }
     

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java

@@ -64,7 +64,7 @@ public class Progress {
   public synchronized Progress addPhase() {
     Progress phase = addNewPhase();
     // set equal weightage for all phases
-    progressPerPhase = 1.0f / (float)phases.size();
+    progressPerPhase = 1.0f / phases.size();
     fixedWeightageForAllPhases = true;
     return phase;
   }
@@ -110,7 +110,7 @@ public class Progress {
       addNewPhase();
     }
     // set equal weightage for all phases
-    progressPerPhase = 1.0f / (float)phases.size();
+    progressPerPhase = 1.0f / phases.size();
     fixedWeightageForAllPhases = true;
   }
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java

@@ -78,7 +78,7 @@ public class RunJar {
     try {
       Enumeration<JarEntry> entries = jar.entries();
       while (entries.hasMoreElements()) {
-        JarEntry entry = (JarEntry)entries.nextElement();
+        final JarEntry entry = entries.nextElement();
         if (!entry.isDirectory() &&
             unpackRegex.matcher(entry.getName()).matches()) {
           InputStream in = jar.getInputStream(entry);

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java

@@ -431,7 +431,7 @@ public class StringUtils {
     ArrayList<String> strList = new ArrayList<String>();
     int startIndex = 0;
     int nextIndex = 0;
-    while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) {
+    while ((nextIndex = str.indexOf(separator, startIndex)) != -1) {
       strList.add(str.substring(startIndex, nextIndex));
       startIndex = nextIndex + 1;
     }

+ 2 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java

@@ -19,18 +19,13 @@
 package org.apache.hadoop.util;
 
 import java.io.IOException;
-import java.net.URL;
-import java.net.URLDecoder;
-import java.util.Enumeration;
+import java.io.InputStream;
+import java.util.Properties;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
 import org.apache.hadoop.io.IOUtils;
 
 /**