
Merge trunk into QJM branch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-3077@1380990 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon, 12 years ago
parent
commit
99ec5bd8d3
100 files changed, 619 insertions(+), 133 deletions(-)
  1. BUILDING.txt (+22 -2)
  2. hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java (+1 -0)
  3. hadoop-common-project/hadoop-common/CHANGES.txt (+44 -0)
  4. hadoop-common-project/hadoop-common/src/JNIFlags.cmake (+50 -1)
  5. hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh (+8 -0)
  6. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (+4 -0)
  7. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java (+2 -0)
  8. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java (+0 -11)
  9. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java (+0 -1)
  10. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java (+5 -0)
  11. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java (+1 -0)
  12. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java (+7 -1)
  13. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java (+15 -3)
  14. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java (+9 -1)
  15. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java (+3 -3)
  16. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java (+1 -0)
  17. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java (+4 -0)
  18. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java (+7 -0)
  19. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java (+7 -0)
  20. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java (+6 -0)
  21. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java (+2 -0)
  22. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java (+2 -1)
  23. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (+26 -0)
  24. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java (+5 -0)
  25. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (+9 -3)
  26. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (+2 -0)
  27. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (+26 -12)
  28. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java (+0 -3)
  29. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java (+3 -0)
  30. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java (+1 -0)
  31. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java (+2 -0)
  32. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java (+0 -1)
  33. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java (+1 -0)
  34. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java (+2 -0)
  35. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (+26 -0)
  36. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java (+1 -0)
  37. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java (+3 -0)
  38. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java (+10 -10)
  39. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java (+0 -1)
  40. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (+4 -0)
  41. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (+30 -6)
  42. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java (+1 -0)
  43. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (+1 -0)
  44. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java (+9 -0)
  45. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java (+17 -0)
  46. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java (+10 -0)
  47. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java (+4 -5)
  48. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java (+8 -5)
  49. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java (+6 -3)
  50. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java (+14 -0)
  51. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java (+4 -0)
  52. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java (+2 -0)
  53. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java (+12 -0)
  54. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java (+3 -0)
  55. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java (+2 -0)
  56. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (+2 -0)
  57. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java (+1 -0)
  58. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (+1 -0)
  59. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (+1 -0)
  60. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java (+0 -4)
  61. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (+1 -0)
  62. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java (+2 -1)
  63. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java (+4 -0)
  64. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java (+1 -0)
  65. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java (+1 -0)
  66. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java (+2 -0)
  67. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java (+4 -5)
  68. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (+2 -2)
  69. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java (+4 -2)
  70. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java (+2 -0)
  71. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java (+2 -0)
  72. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java (+2 -0)
  73. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java (+4 -0)
  74. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java (+2 -0)
  75. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java (+0 -2)
  76. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java (+3 -0)
  77. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java (+2 -0)
  78. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java (+8 -7)
  79. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java (+2 -0)
  80. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java (+5 -0)
  81. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (+2 -0)
  82. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java (+2 -0)
  83. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java (+8 -0)
  84. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java (+8 -0)
  85. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java (+2 -0)
  86. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java (+14 -16)
  87. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java (+4 -0)
  88. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java (+7 -0)
  89. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java (+1 -0)
  90. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java (+1 -0)
  91. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (+0 -1)
  92. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (+32 -0)
  93. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java (+1 -0)
  94. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java (+18 -20)
  95. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java (+1 -0)
  96. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (+8 -0)
  97. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java (+2 -0)
  98. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java (+2 -0)
  99. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java (+2 -0)
  100. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java (+2 -0)

+ 22 - 2
BUILDING.txt

@@ -54,12 +54,32 @@ Maven build goals:
  Build options:
 
   * Use -Pnative to compile/bundle native code
-  * Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
-    Snappy JNI bindings and to bundle Snappy SO files
   * Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
   * Use -Psrc to create a project source TAR.GZ
   * Use -Dtar to create a TAR with the distribution (using -Pdist)
 
+ Snappy build options:
+
+   Snappy is a compression library that can be utilized by the native code.
+   It is currently an optional component, meaning that Hadoop can be built with
+   or without this dependency.
+
+  * Use -Drequire.snappy to fail the build if libsnappy.so is not found.
+    If this option is not specified and the snappy library is missing,
+    we silently build a version of libhadoop.so that cannot make use of snappy.
+    This option is recommended if you plan on making use of snappy and want
+    to get more repeatable builds.
+
+  * Use -Dsnappy.prefix to specify a nonstandard location for the libsnappy
+    header files and library files. You do not need this option if you have
+    installed snappy using a package manager.
+  * Use -Dsnappy.lib to specify a nonstandard location for the libsnappy library
+    files.  Similarly to snappy.prefix, you do not need this option if you have
+    installed snappy using a package manager.
+  * Use -Dbundle.snappy to copy the contents of the snappy.lib directory into
+    the final tar file. This option requires that -Dsnappy.lib is also given,
+    and it ignores the -Dsnappy.prefix option.
+
    Tests options:
 
   * Use -DskipTests to skip tests when running the following Maven goals:

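One way to confirm that a build produced snappy-capable native code is to query NativeCodeLoader at runtime. A minimal sketch, assuming the Hadoop 2.x-era NativeCodeLoader API (buildSupportsSnappy() is an assumption here, not part of this diff):

    // Hedged sketch: checks whether libhadoop.so loaded and whether it was
    // compiled against libsnappy (e.g. a build run with -Drequire.snappy).
    import org.apache.hadoop.util.NativeCodeLoader;

    public class SnappyCheck {
      public static void main(String[] args) {
        System.out.println("native hadoop loaded: "
            + NativeCodeLoader.isNativeCodeLoaded());
        System.out.println("snappy supported: "
            + NativeCodeLoader.buildSupportsSnappy());
      }
    }
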
+ 1 - 0
hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java

@@ -97,6 +97,7 @@ class RootDocProcessor {
       this.target = target;
     }
     
+    @Override
     public Object invoke(Object proxy, Method method, Object[] args)
 	throws Throwable {
       String methodName = method.getName();

+ 44 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -95,6 +95,14 @@ Trunk (unreleased changes)
     the message is printed and the stack trace is not printed to avoid chatter.
     (Brandon Li via Suresh)
 
+    HADOOP-8719. Workaround for kerberos-related log errors upon running any
+    hadoop command on OSX. (Jianbin Wei via harsh)
+
+    HADOOP-8619. WritableComparator must implement no-arg constructor.
+    (Chris Douglas via Suresh)
+
+    HADOOP-8736. Add Builder for building RPC server. (Brandon Li via Suresh)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -191,6 +199,9 @@ Trunk (unreleased changes)
     HADOOP-8623. hadoop jar command should respect HADOOP_OPTS.
     (Steven Willis via suresh)
 
+    HADOOP-8684. Deadlock between WritableComparator and WritableComparable.
+    (Jing Zhao via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -314,6 +325,9 @@ Branch-2 ( Unreleased changes )
     HADOOP-8075. Lower native-hadoop library log from info to debug.
     (Hızır Sefa İrken via eli)
 
+    HADOOP-8748. Refactor DFSClient retry utility methods to a new class
+    in org.apache.hadoop.io.retry.  (Arun C Murthy via szetszwo)
+
   BUG FIXES
 
     HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname
@@ -439,6 +453,18 @@ Branch-2 ( Unreleased changes )
     HADOOP-8031. Configuration class fails to find embedded .jar resources; 
     should use URL.openStream() (genman via tucu)
 
+    HADOOP-8738. junit JAR is showing up in the distro (tucu)
+
+    HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, jni_md.h.
+    (Colin Patrick McCabe via eli)
+
+    HADOOP-8747. Syntax error on cmake version 2.6 patch 2 in JNIFlags.cmake. (cmccabe via tucu)
+
+    HADOOP-8722. Update BUILDING.txt with latest snappy info.
+    (Colin Patrick McCabe via eli)
+
+    HADOOP-8764. CMake: HADOOP-8737 broke ARM build. (Trevor Robinson via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HADOOP-8220. ZKFailoverController doesn't handle failure to become active
@@ -841,6 +867,18 @@ Release 2.0.0-alpha - 05-23-2012
     HADOOP-8655. Fix TextInputFormat for large deliminators. (Gelesh via
     bobby) 
 
+Release 0.23.4 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -977,6 +1015,12 @@ Release 0.23.3 - UNRELEASED
 
     HADOOP-8725. MR is broken when security is off (daryn via bobby)
 
+    HADOOP-8726. The Secrets in Credentials are not available to MR tasks
+    (daryn and Benoy Antony via bobby)
+
+    HADOOP-8727. Gracefully deprecate dfs.umaskmode in 2.x onwards (Harsh J
+    via bobby)
+
 Release 0.23.2 - UNRELEASED 
 
   INCOMPATIBLE CHANGES

+ 50 - 1
hadoop-common-project/hadoop-common/src/JNIFlags.cmake

@@ -65,4 +65,53 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm" AND CMAKE_SYSTEM_NAME STREQUAL "Linux"
     endif (READELF MATCHES "NOTFOUND")
 endif (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm" AND CMAKE_SYSTEM_NAME STREQUAL "Linux")
 
-find_package(JNI REQUIRED)
+IF("${CMAKE_SYSTEM}" MATCHES "Linux")
+    #
+    # Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
+    # Since we were invoked from Maven, we know that the JAVA_HOME environment
+    # variable is valid.  So we ignore system paths here and just use JAVA_HOME.
+    #
+    FILE(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _JAVA_HOME)
+    IF(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
+        SET(_java_libarch "i386")
+    ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+        SET(_java_libarch "amd64")
+    ELSEIF (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
+        SET(_java_libarch "arm")
+    ELSE()
+        SET(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
+    ENDIF()
+    SET(_JDK_DIRS "${_JAVA_HOME}/jre/lib/${_java_libarch}/*"
+                  "${_JAVA_HOME}/jre/lib/${_java_libarch}"
+                  "${_JAVA_HOME}/jre/lib/*"
+                  "${_JAVA_HOME}/jre/lib"
+                  "${_JAVA_HOME}/lib/*"
+                  "${_JAVA_HOME}/lib"
+                  "${_JAVA_HOME}/include/*"
+                  "${_JAVA_HOME}/include"
+                  "${_JAVA_HOME}"
+    )
+    FIND_PATH(JAVA_INCLUDE_PATH
+        NAMES jni.h 
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    FIND_PATH(JAVA_INCLUDE_PATH2 
+        NAMES jni_md.h
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    SET(JNI_INCLUDE_DIRS ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
+    FIND_LIBRARY(JAVA_JVM_LIBRARY
+        NAMES jvm JavaVM
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    SET(JNI_LIBRARIES ${JAVA_JVM_LIBRARY})
+    MESSAGE("JAVA_HOME=${JAVA_HOME}, JAVA_JVM_LIBRARY=${JAVA_JVM_LIBRARY}")
+    MESSAGE("JAVA_INCLUDE_PATH=${JAVA_INCLUDE_PATH}, JAVA_INCLUDE_PATH2=${JAVA_INCLUDE_PATH2}")
+    IF(JAVA_JVM_LIBRARY AND JAVA_INCLUDE_PATH AND JAVA_INCLUDE_PATH2)
+        MESSAGE("Located all JNI components successfully.")
+    ELSE()
+        MESSAGE(FATAL_ERROR "Failed to find a viable JVM installation under JAVA_HOME.")
+    ENDIF()
+ELSE()
+    find_package(JNI REQUIRED)
+ENDIF()

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh

@@ -47,6 +47,14 @@ done
 # Extra Java runtime options.  Empty by default.
 export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
 
+MAC_OSX=false
+case "`uname`" in
+Darwin*) MAC_OSX=true;;
+esac
+if $MAC_OSX; then
+    export HADOOP_OPTS="$HADOOP_OPTS -Djava.security.krb5.realm= -Djava.security.krb5.kdc="
+fi
+
 # Command specific options appended to HADOOP_OPTS when specified
 export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
 export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -1847,6 +1847,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * 
    * @return an iterator over the entries.
    */
+  @Override
   public Iterator<Map.Entry<String, String>> iterator() {
     // Get a copy of just the string to string pairs. After the old object
     // methods that allow non-strings to be put into configurations are removed,
@@ -2272,6 +2273,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
 
   //@Override
+  @Override
   public void write(DataOutput out) throws IOException {
     Properties props = getProps();
     WritableUtils.writeVInt(out, props.size());
@@ -2322,6 +2324,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
                new String[]{CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY});
     Configuration.addDeprecation("fs.default.name", 
                new String[]{CommonConfigurationKeys.FS_DEFAULT_NAME_KEY});
+    Configuration.addDeprecation("dfs.umaskmode",
+        new String[]{CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY});
   }
   
   /**

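The new entry wires the old dfs.umaskmode key to FS_PERMISSIONS_UMASK_KEY ("fs.permissions.umask-mode"). A minimal sketch of the resulting behavior, assuming the standard Configuration deprecation semantics (reads and writes of the old key are transparently redirected to the new one, with a warning logged rather than a failure):

    // Hedged sketch of the deprecation mapping added above.
    import org.apache.hadoop.conf.Configuration;

    public class UmaskDeprecationDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("dfs.umaskmode", "022"); // deprecated key
        // The value surfaces under the replacement key:
        System.out.println(conf.get("fs.permissions.umask-mode")); // 022
      }
    }
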
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java

@@ -39,11 +39,13 @@ public class Configured implements Configurable {
   }
 
   // inherit javadoc
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
 
   // inherit javadoc
+  @Override
   public Configuration getConf() {
     return conf;
   }

+ 0 - 11
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java

@@ -23,12 +23,10 @@ import org.apache.commons.logging.*;
 import org.apache.commons.lang.StringEscapeUtils;
 
 import java.util.Collection;
-import java.util.Map;
 import java.util.Enumeration;
 import java.io.IOException;
 import java.io.PrintWriter;
 
-import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -57,9 +55,6 @@ public class ReconfigurationServlet extends HttpServlet {
   public static final String CONF_SERVLET_RECONFIGURABLE_PREFIX =
     "conf.servlet.reconfigurable.";
   
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public void init() throws ServletException {
     super.init();
@@ -202,9 +197,6 @@ public class ReconfigurationServlet extends HttpServlet {
     }
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     throws ServletException, IOException {
@@ -219,9 +211,6 @@ public class ReconfigurationServlet extends HttpServlet {
     printFooter(out);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   protected void doPost(HttpServletRequest req, HttpServletResponse resp)
     throws ServletException, IOException {

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java

@@ -47,7 +47,6 @@ import org.apache.hadoop.fs.InvalidPathException;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 
 /**

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java

@@ -45,22 +45,27 @@ public class AvroFSInput implements Closeable, SeekableInput {
     this.stream = fc.open(p);
   }
 
+  @Override
   public long length() {
     return len;
   }
 
+  @Override
   public int read(byte[] b, int off, int len) throws IOException {
     return stream.read(b, off, len);
   }
 
+  @Override
   public void seek(long p) throws IOException {
     stream.seek(p);
   }
 
+  @Override
   public long tell() throws IOException {
     return stream.getPos();
   }
 
+  @Override
   public void close() throws IOException {
     stream.close();
   }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java

@@ -204,6 +204,7 @@ public class BlockLocation {
     }
   }
 
+  @Override
   public String toString() {
     StringBuilder result = new StringBuilder();
     result.append(offset);

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.fs;
 
 import java.io.BufferedInputStream;
 import java.io.FileDescriptor;
-import java.io.FileInputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -50,10 +49,12 @@ implements Seekable, PositionedReadable, HasFileDescriptor {
     super(in, size);
   }
 
+  @Override
   public long getPos() throws IOException {
     return ((FSInputStream)in).getPos()-(count-pos);
   }
 
+  @Override
   public long skip(long n) throws IOException {
     if (n <= 0) {
       return 0;
@@ -63,6 +64,7 @@ implements Seekable, PositionedReadable, HasFileDescriptor {
     return n;
   }
 
+  @Override
   public void seek(long pos) throws IOException {
     if( pos<0 ) {
       return;
@@ -82,20 +84,24 @@ implements Seekable, PositionedReadable, HasFileDescriptor {
     ((FSInputStream)in).seek(pos);
   }
 
+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     pos = 0;
     count = 0;
     return ((FSInputStream)in).seekToNewSource(targetPos);
   }
 
+  @Override
   public int read(long position, byte[] buffer, int offset, int length) throws IOException {
     return ((FSInputStream)in).read(position, buffer, offset, length) ;
   }
 
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
     ((FSInputStream)in).readFully(position, buffer, offset, length);
   }
 
+  @Override
   public void readFully(long position, byte[] buffer) throws IOException {
     ((FSInputStream)in).readFully(position, buffer);
   }

+ 15 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java

@@ -53,6 +53,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     super(fs);
   }
 
+  @Override
   public void setConf(Configuration conf) {
     super.setConf(conf);
     if (conf != null) {
@@ -64,6 +65,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   /**
    * Set whether to verify checksum.
    */
+  @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     this.verifyChecksum = verifyChecksum;
   }
@@ -74,6 +76,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   }
   
   /** get the raw file system */
+  @Override
   public FileSystem getRawFileSystem() {
     return fs;
   }
@@ -162,14 +165,17 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
       return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
     }
     
+    @Override
     protected long getChunkPosition( long dataPos ) {
       return dataPos/bytesPerSum*bytesPerSum;
     }
     
+    @Override
     public int available() throws IOException {
       return datas.available() + super.available();
     }
     
+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException {
       // parameter check
@@ -190,6 +196,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
       return nread;
     }
     
+    @Override
     public void close() throws IOException {
       datas.close();
       if( sums != null ) {
@@ -290,6 +297,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
      * @exception  IOException  if an I/O error occurs.
      *             ChecksumException if the chunk to skip to is corrupted
      */
+    @Override
     public synchronized long skip(long n) throws IOException {
       long curPos = getPos();
       long fileLength = getFileLength();
@@ -311,6 +319,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
      *             ChecksumException if the chunk to seek to is corrupted
      */
 
+    @Override
     public synchronized void seek(long pos) throws IOException {
       if(pos>getFileLength()) {
         throw new IOException("Cannot seek after EOF");
@@ -339,7 +348,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     return new FSDataBoundedInputStream(fs, f, in);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");
@@ -398,6 +407,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
       sums.writeInt(bytesPerSum);
     }
     
+    @Override
     public void close() throws IOException {
       flushBuffer();
       sums.close();
@@ -412,7 +422,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     }
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -454,7 +463,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     return out;
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -472,6 +480,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     boolean value = fs.setReplication(src, replication);
     if (!value)
@@ -487,6 +496,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   /**
    * Rename files/dirs
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     if (fs.isDirectory(src)) {
       return fs.rename(src, dst);
@@ -516,6 +526,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Implement the delete(Path, boolean) in checksum
    * file system.
    */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException{
     FileStatus fstatus = null;
     try {
@@ -538,6 +549,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   }
     
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(Path file) {
       return !isChecksumFile(file);
     }

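The hunks above expose the checksum layout arithmetic: each bytesPerSum-sized data chunk is covered by one 4-byte CRC stored after a fixed header in the companion checksum file. A worked sketch of that arithmetic, with illustrative values for HEADER_LENGTH and bytesPerSum:

    // Hedged sketch of the getChecksumFilePos/getChunkPosition math above.
    public class ChecksumOffsets {
      static final int HEADER_LENGTH = 8;   // assumed header size
      static final int BYTES_PER_SUM = 512; // typical io.bytes.per.checksum

      static long checksumFilePos(long dataPos) {
        // One 4-byte CRC per chunk, after the header.
        return HEADER_LENGTH + 4 * (dataPos / BYTES_PER_SUM);
      }

      static long chunkPosition(long dataPos) {
        // Round down to the start of the covering chunk.
        return dataPos / BYTES_PER_SUM * BYTES_PER_SUM;
      }

      public static void main(String[] args) {
        System.out.println(chunkPosition(1300));   // 1024 (third chunk)
        System.out.println(checksumFilePos(1300)); // 16   (8 + 4*2)
      }
    }
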
+ 9 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java

@@ -32,7 +32,6 @@ import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.PureJavaCrc32;
-import org.apache.hadoop.util.StringUtils;
 
 /**
  * Abstract Checksumed Fs.
@@ -61,6 +60,7 @@ public abstract class ChecksumFs extends FilterFs {
   /**
    * Set whether to verify checksum.
    */
+  @Override
   public void setVerifyChecksum(boolean inVerifyChecksum) {
     this.verifyChecksum = inVerifyChecksum;
   }
@@ -152,14 +152,17 @@ public abstract class ChecksumFs extends FilterFs {
       return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
     }
     
+    @Override
     protected long getChunkPosition(long dataPos) {
       return dataPos/bytesPerSum*bytesPerSum;
     }
     
+    @Override
     public int available() throws IOException {
       return datas.available() + super.available();
     }
     
+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException, UnresolvedLinkException {
       // parameter check
@@ -180,6 +183,7 @@ public abstract class ChecksumFs extends FilterFs {
       return nread;
     }
     
+    @Override
     public void close() throws IOException {
       datas.close();
       if (sums != null) {
@@ -258,6 +262,7 @@ public abstract class ChecksumFs extends FilterFs {
      * @exception  IOException  if an I/O error occurs.
      *             ChecksumException if the chunk to skip to is corrupted
      */
+    @Override
     public synchronized long skip(long n) throws IOException { 
       final long curPos = getPos();
       final long fileLength = getFileLength();
@@ -279,6 +284,7 @@ public abstract class ChecksumFs extends FilterFs {
      *             ChecksumException if the chunk to seek to is corrupted
      */
 
+    @Override
     public synchronized void seek(long pos) throws IOException { 
       if (pos>getFileLength()) {
         throw new IOException("Cannot seek after EOF");
@@ -348,6 +354,7 @@ public abstract class ChecksumFs extends FilterFs {
       sums.writeInt(bytesPerSum);
     }
     
+    @Override
     public void close() throws IOException {
       flushBuffer();
       sums.close();
@@ -447,6 +454,7 @@ public abstract class ChecksumFs extends FilterFs {
    * Implement the delete(Path, boolean) in checksum
    * file system.
    */
+  @Override
   public boolean delete(Path f, boolean recursive) 
     throws IOException, UnresolvedLinkException {
     FileStatus fstatus = null;

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java

@@ -75,7 +75,7 @@ public class ContentSummary implements Writable{
   /** Returns (disk) space quota */
   public long getSpaceQuota() {return spaceQuota;}
   
-  /** {@inheritDoc} */
+  @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
     out.writeLong(length);
@@ -86,7 +86,7 @@ public class ContentSummary implements Writable{
     out.writeLong(spaceQuota);
   }
 
-  /** {@inheritDoc} */
+  @Override
   @InterfaceAudience.Private
   public void readFields(DataInput in) throws IOException {
     this.length = in.readLong();
@@ -131,7 +131,7 @@ public class ContentSummary implements Writable{
     return qOption ? QUOTA_HEADER : HEADER;
   }
   
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     return toString(true);
   }

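The write/readFields pair above is the Writable contract: fields must be read back in exactly the order they were written. A minimal round-trip sketch using Hadoop's in-memory buffers (the three-argument ContentSummary constructor is assumed from this era's API):

    // Hedged sketch: serialize a ContentSummary and read it back.
    import java.io.IOException;
    import org.apache.hadoop.fs.ContentSummary;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;

    public class ContentSummaryRoundTrip {
      public static void main(String[] args) throws IOException {
        ContentSummary cs = new ContentSummary(1024L, 3L, 1L); // length, files, dirs
        DataOutputBuffer out = new DataOutputBuffer();
        cs.write(out);       // longs written in a fixed order, as in the diff

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        ContentSummary copy = new ContentSummary();
        copy.readFields(in); // longs read back in the same order
        System.out.println(copy.getLength()); // 1024
      }
    }
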
+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java

@@ -131,6 +131,7 @@ public class DF extends Shell {
     return mount;
   }
   
+  @Override
   public String toString() {
     return
       "df -k " + mount +"\n" +

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java

@@ -76,6 +76,7 @@ public class DU extends Shell {
    **/
   class DURefreshThread implements Runnable {
     
+    @Override
     public void run() {
       
       while(shouldRun) {
@@ -169,16 +170,19 @@ public class DU extends Shell {
     }
   }
   
+  @Override
   public String toString() {
     return
       "du -sk " + dirPath +"\n" +
       used + "\t" + dirPath;
   }
 
+  @Override
   protected String[] getExecString() {
     return new String[] {"du", "-sk", dirPath};
   }
   
+  @Override
   protected void parseExecResult(BufferedReader lines) throws IOException {
     String line = lines.readLine();
     if (line == null) {

+ 7 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java

@@ -44,6 +44,7 @@ public class FSDataInputStream extends DataInputStream
    *
    * @param desired offset to seek to
    */
+  @Override
   public synchronized void seek(long desired) throws IOException {
     ((Seekable)in).seek(desired);
   }
@@ -53,6 +54,7 @@ public class FSDataInputStream extends DataInputStream
    *
    * @return current position in the input stream
    */
+  @Override
   public long getPos() throws IOException {
     return ((Seekable)in).getPos();
   }
@@ -68,6 +70,7 @@ public class FSDataInputStream extends DataInputStream
    *         if there is no more data because the end of the stream has been
    *         reached
    */
+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     return ((PositionedReadable)in).read(position, buffer, offset, length);
@@ -85,6 +88,7 @@ public class FSDataInputStream extends DataInputStream
    *                      If an exception is thrown an undetermined number
    *                      of bytes in the buffer may have been written. 
    */
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     ((PositionedReadable)in).readFully(position, buffer, offset, length);
@@ -93,6 +97,7 @@ public class FSDataInputStream extends DataInputStream
   /**
    * See {@link #readFully(long, byte[], int, int)}.
    */
+  @Override
   public void readFully(long position, byte[] buffer)
     throws IOException {
     ((PositionedReadable)in).readFully(position, buffer, 0, buffer.length);
@@ -104,6 +109,7 @@ public class FSDataInputStream extends DataInputStream
    * @param  targetPos  position to seek to
    * @return true if a new source is found, false otherwise
    */
+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     return ((Seekable)in).seekToNewSource(targetPos); 
   }
@@ -118,6 +124,7 @@ public class FSDataInputStream extends DataInputStream
     return in;
   }
 
+  @Override
   public int read(ByteBuffer buf) throws IOException {
     if (in instanceof ByteBufferReadable) {
       return ((ByteBufferReadable)in).read(buf);

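The javadoc above distinguishes the two read styles on FSDataInputStream: positional reads leave the stream's seek pointer untouched, while seek() followed by read() moves it. A short sketch of the difference (the path and offsets are illustrative):

    // Hedged sketch contrasting positional and stateful reads.
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class PositionalReadDemo {
      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        FSDataInputStream in = fs.open(new Path("/tmp/example.dat"));
        try {
          byte[] buf = new byte[16];
          in.readFully(100L, buf);         // positional: pointer stays at 0
          System.out.println(in.getPos()); // 0
          in.seek(100L);                   // stateful: pointer moves
          in.read(buf, 0, buf.length);
          System.out.println(in.getPos()); // 116, assuming a full read
        } finally {
          in.close();
        }
      }
    }
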
+ 7 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java

@@ -140,6 +140,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @exception  IOException  if an I/O error occurs.
    */
 
+  @Override
   public synchronized int read() throws IOException {
     if (pos >= count) {
       fill();
@@ -180,6 +181,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @exception  IOException  if an I/O error occurs.
    *             ChecksumException if any checksum error occurs
    */
+  @Override
   public synchronized int read(byte[] b, int off, int len) throws IOException {
     // parameter check
     if ((off | len | (off + len) | (b.length - (off + len))) < 0) {
@@ -367,6 +369,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @exception  IOException  if an I/O error occurs.
    *             ChecksumException if the chunk to skip to is corrupted
    */
+  @Override
   public synchronized long skip(long n) throws IOException {
     if (n <= 0) {
       return 0;
@@ -389,6 +392,7 @@ abstract public class FSInputChecker extends FSInputStream {
    *             ChecksumException if the chunk to seek to is corrupted
    */
 
+  @Override
   public synchronized void seek(long pos) throws IOException {
     if( pos<0 ) {
       return;
@@ -462,13 +466,16 @@ abstract public class FSInputChecker extends FSInputStream {
     this.pos = 0;
   }
 
+  @Override
   final public boolean markSupported() {
     return false;
   }
   
+  @Override
   final public void mark(int readlimit) {
   }
   
+  @Override
   final public void reset() throws IOException {
     throw new IOException("mark/reset not supported");
   }

+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java

@@ -36,19 +36,23 @@ public abstract class FSInputStream extends InputStream
    * The next read() will be from that location.  Can't
    * seek past the end of the file.
    */
+  @Override
   public abstract void seek(long pos) throws IOException;
 
   /**
    * Return the current offset from the start of the file
    */
+  @Override
   public abstract long getPos() throws IOException;
 
   /**
    * Seeks a different copy of the data.  Returns true if 
    * found a new source, false otherwise.
    */
+  @Override
   public abstract boolean seekToNewSource(long targetPos) throws IOException;
 
+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     synchronized (this) {
@@ -64,6 +68,7 @@ public abstract class FSInputStream extends InputStream
     }
   }
     
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     int nread = 0;
@@ -76,6 +81,7 @@ public abstract class FSInputStream extends InputStream
     }
   }
     
+  @Override
   public void readFully(long position, byte[] buffer)
     throws IOException {
     readFully(position, buffer, 0, buffer.length);

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java

@@ -55,6 +55,7 @@ abstract public class FSOutputSummer extends OutputStream {
   throws IOException;
 
   /** Write one byte */
+  @Override
   public synchronized void write(int b) throws IOException {
     sum.update(b);
     buf[count++] = (byte)b;
@@ -81,6 +82,7 @@ abstract public class FSOutputSummer extends OutputStream {
    * @param      len   the number of bytes to write.
    * @exception  IOException  if an I/O error occurs.
    */
+  @Override
   public synchronized void write(byte b[], int off, int len)
   throws IOException {
     if (off < 0 || len < 0 || off > b.length - len) {

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java

@@ -37,6 +37,7 @@ public abstract class FileChecksum implements Writable {
   public abstract byte[] getBytes();
 
   /** Return true if both the algorithms and the values are the same. */
+  @Override
   public boolean equals(Object other) {
     if (other == this) {
       return true;
@@ -50,7 +51,7 @@ public abstract class FileChecksum implements Writable {
       && Arrays.equals(this.getBytes(), that.getBytes());
   }
   
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     return getAlgorithmName().hashCode() ^ Arrays.hashCode(getBytes());
   }

+ 26 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java

@@ -190,6 +190,7 @@ public final class FileContext {
     new FileContextFinalizer();
   
   private static final PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(final Path file) {
       return true;
     }
@@ -318,6 +319,7 @@ public final class FileContext {
       throws UnsupportedFileSystemException, IOException {
     try {
       return user.doAs(new PrivilegedExceptionAction<AbstractFileSystem>() {
+        @Override
         public AbstractFileSystem run() throws UnsupportedFileSystemException {
           return AbstractFileSystem.get(uri, conf);
         }
@@ -660,6 +662,7 @@ public final class FileContext {
     final CreateOpts[] updatedOpts = 
                       CreateOpts.setOpt(CreateOpts.perms(permission), opts);
     return new FSLinkResolver<FSDataOutputStream>() {
+      @Override
       public FSDataOutputStream next(final AbstractFileSystem fs, final Path p) 
         throws IOException {
         return fs.create(p, createFlag, updatedOpts);
@@ -703,6 +706,7 @@ public final class FileContext {
     final FsPermission absFerms = (permission == null ? 
           FsPermission.getDefault() : permission).applyUMask(umask);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.mkdir(p, absFerms, createParent);
@@ -738,6 +742,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     Path absF = fixRelativePart(f);
     return new FSLinkResolver<Boolean>() {
+      @Override
       public Boolean next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return Boolean.valueOf(fs.delete(p, recursive));
@@ -766,6 +771,7 @@ public final class FileContext {
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FSDataInputStream>() {
+      @Override
       public FSDataInputStream next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.open(p);
@@ -796,6 +802,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FSDataInputStream>() {
+      @Override
       public FSDataInputStream next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.open(p, bufferSize);
@@ -826,6 +833,7 @@ public final class FileContext {
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<Boolean>() {
+      @Override
       public Boolean next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return Boolean.valueOf(fs.setReplication(p, replication));
@@ -894,6 +902,7 @@ public final class FileContext {
        */
       final Path source = resolveIntermediate(absSrc);    
       new FSLinkResolver<Void>() {
+        @Override
         public Void next(final AbstractFileSystem fs, final Path p) 
           throws IOException, UnresolvedLinkException {
           fs.rename(source, p, options);
@@ -925,6 +934,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.setPermission(p, permission);
@@ -967,6 +977,7 @@ public final class FileContext {
     }
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.setOwner(p, username, groupname);
@@ -1002,6 +1013,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.setTimes(p, mtime, atime);
@@ -1034,6 +1046,7 @@ public final class FileContext {
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileChecksum>() {
+      @Override
       public FileChecksum next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileChecksum(p);
@@ -1089,6 +1102,7 @@ public final class FileContext {
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileStatus(p);
@@ -1135,6 +1149,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         FileStatus fi = fs.getFileLinkStatus(p);
@@ -1165,6 +1180,7 @@ public final class FileContext {
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<Path>() {
+      @Override
       public Path next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         FileStatus fi = fs.getFileLinkStatus(p);
@@ -1208,6 +1224,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<BlockLocation[]>() {
+      @Override
       public BlockLocation[] next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileBlockLocations(p, start, len);
@@ -1246,6 +1263,7 @@ public final class FileContext {
     }
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FsStatus>() {
+      @Override
       public FsStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFsStatus(p);
@@ -1339,6 +1357,7 @@ public final class FileContext {
       IOException { 
     final Path nonRelLink = fixRelativePart(link);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.createSymlink(target, p, createParent);
@@ -1373,6 +1392,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<RemoteIterator<FileStatus>>() {
+      @Override
       public RemoteIterator<FileStatus> next(
           final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
@@ -1432,6 +1452,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<RemoteIterator<LocatedFileStatus>>() {
+      @Override
       public RemoteIterator<LocatedFileStatus> next(
           final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
@@ -1703,6 +1724,7 @@ public final class FileContext {
         IOException {
       final Path absF = fixRelativePart(f);
       return new FSLinkResolver<FileStatus[]>() {
+        @Override
         public FileStatus[] next(final AbstractFileSystem fs, final Path p) 
           throws IOException, UnresolvedLinkException {
           return fs.listStatus(p);
@@ -2232,6 +2254,7 @@ public final class FileContext {
    * Deletes all the paths in deleteOnExit on JVM shutdown.
    */
   static class FileContextFinalizer implements Runnable {
+    @Override
     public synchronized void run() {
       processDeleteOnExit();
     }
@@ -2244,6 +2267,7 @@ public final class FileContext {
   protected Path resolve(final Path f) throws FileNotFoundException,
       UnresolvedLinkException, AccessControlException, IOException {
     return new FSLinkResolver<Path>() {
+      @Override
       public Path next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.resolvePath(p);
@@ -2259,6 +2283,7 @@ public final class FileContext {
    */
   protected Path resolveIntermediate(final Path f) throws IOException {
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileLinkStatus(p);
@@ -2281,6 +2306,7 @@ public final class FileContext {
     final HashSet<AbstractFileSystem> result 
       = new HashSet<AbstractFileSystem>();
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
           throws IOException, UnresolvedLinkException {
         result.add(fs);

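Every public FileContext operation in this diff funnels through the same anonymous FSLinkResolver pattern annotated above: the resolver re-runs the operation against the link target whenever an UnresolvedLinkException surfaces. A small usage sketch of the public API (the path is illustrative; passing null for permission falls back to FsPermission.getDefault(), per the mkdir hunk above):

    // Hedged sketch of basic FileContext usage.
    import java.io.IOException;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.Path;

    public class FileContextDemo {
      public static void main(String[] args) throws IOException {
        FileContext fc = FileContext.getLocalFSFileContext();
        Path p = new Path("/tmp/fc-demo");
        fc.mkdir(p, null, true); // null permission => default, umask applied
        FileStatus st = fc.getFileStatus(p);
        System.out.println(st.getPath() + " dir=" + st.isDirectory());
      }
    }
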
+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java

@@ -253,6 +253,7 @@ public class FileStatus implements Writable, Comparable {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     Text.writeString(out, getPath().toString(), Text.DEFAULT_MAX_LEN);
     out.writeLong(getLen());
@@ -270,6 +271,7 @@ public class FileStatus implements Writable, Comparable {
     }
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     String strPath = Text.readString(in, Text.DEFAULT_MAX_LEN);
     this.path = new Path(strPath);
@@ -299,6 +301,7 @@ public class FileStatus implements Writable, Comparable {
    * @throws ClassCastException if the specified object's is not of 
    *         type FileStatus
    */
+  @Override
   public int compareTo(Object o) {
     FileStatus other = (FileStatus)o;
     return this.getPath().compareTo(other.getPath());
@@ -308,6 +311,7 @@ public class FileStatus implements Writable, Comparable {
    * @param   o the object to be compared.
    * @return  true if two file status has the same path name; false if not.
    */
+  @Override
   public boolean equals(Object o) {
     if (o == null) {
       return false;
@@ -328,6 +332,7 @@ public class FileStatus implements Writable, Comparable {
    *
    * @return  a hash code value for the path name.
    */
+  @Override
   public int hashCode() {
     return getPath().hashCode();
   }

+ 9 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

@@ -147,6 +147,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     UserGroupInformation ugi =
         UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override
       public FileSystem run() throws IOException {
         return get(uri, conf);
       }
@@ -332,6 +333,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     UserGroupInformation ugi =
         UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override
       public FileSystem run() throws IOException {
         return newInstance(uri,conf); 
       }
@@ -1389,6 +1391,7 @@ public abstract class FileSystem extends Configured implements Closeable {
   }
 
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
+      @Override
       public boolean accept(Path file) {
         return true;
       }     
@@ -2056,6 +2059,7 @@ public abstract class FileSystem extends Configured implements Closeable {
    * No more filesystem operations are needed.  Will
    * release any held locks.
    */
+  @Override
   public void close() throws IOException {
     // delete all files that were marked as delete-on-exit.
     processDeleteOnExit();
@@ -2393,6 +2397,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     }
 
     private class ClientFinalizer implements Runnable {
+      @Override
       public synchronized void run() {
         try {
           closeAll(true);
@@ -2447,7 +2452,7 @@ public abstract class FileSystem extends Configured implements Closeable {
         this.ugi = UserGroupInformation.getCurrentUser();
       }
 
-      /** {@inheritDoc} */
+      @Override
       public int hashCode() {
         return (scheme + authority).hashCode() + ugi.hashCode() + (int)unique;
       }
@@ -2456,7 +2461,7 @@ public abstract class FileSystem extends Configured implements Closeable {
         return a == b || (a != null && a.equals(b));        
       }
 
-      /** {@inheritDoc} */
+      @Override
       public boolean equals(Object obj) {
         if (obj == this) {
           return true;
@@ -2471,7 +2476,7 @@ public abstract class FileSystem extends Configured implements Closeable {
         return false;        
       }
 
-      /** {@inheritDoc} */
+      @Override
       public String toString() {
         return "("+ugi.toString() + ")@" + scheme + "://" + authority;        
       }
@@ -2584,6 +2589,7 @@ public abstract class FileSystem extends Configured implements Closeable {
       return writeOps.get();
     }
 
+    @Override
     public String toString() {
       return bytesRead + " bytes read, " + bytesWritten + " bytes written, "
           + readOps + " read ops, " + largeReadOps + " large read ops, "

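The get/newInstance hunks above show the pattern for obtaining a FileSystem bound to a specific user: construction runs inside ugi.doAs(...) so the resulting instance is tied to that UGI. A minimal sketch of the same pattern from user code ("alice" and the URI are illustrative):

    // Hedged sketch of creating a FileSystem as another user via doAs.
    import java.net.URI;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    public class FsAsUser {
      public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
        FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
          @Override
          public FileSystem run() throws Exception {
            return FileSystem.get(URI.create("file:///"), conf);
          }
        });
        System.out.println(fs.getUri());
      }
    }
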
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -414,9 +414,11 @@ public class FileUtil {
     String getResult() throws IOException {
       return result;
     }
+    @Override
     protected String[] getExecString() {
       return command;
     }
+    @Override
     protected void parseExecResult(BufferedReader lines) throws IOException {
       String line = lines.readLine();
       if (line == null) {

+ 26 - 12
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java

@@ -76,6 +76,7 @@ public class FilterFileSystem extends FileSystem {
    *   for this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
     // this is less than ideal, but existing filesystems sometimes neglect
@@ -90,6 +91,7 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
+  @Override
   public URI getUri() {
     return fs.getUri();
   }
@@ -104,6 +106,7 @@ public class FilterFileSystem extends FileSystem {
   }
   
   /** Make sure that a path specifies a FileSystem. */
+  @Override
   public Path makeQualified(Path path) {
     Path fqPath = fs.makeQualified(path);
     // swap in our scheme if the filtered fs is using a different scheme
@@ -125,10 +128,12 @@ public class FilterFileSystem extends FileSystem {
   ///////////////////////////////////////////////////////////////
 
   /** Check that a Path belongs to this FileSystem. */
+  @Override
   protected void checkPath(Path path) {
     fs.checkPath(path);
   }
 
+  @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
     long len) throws IOException {
       return fs.getFileBlockLocations(file, start, len);
@@ -143,17 +148,17 @@ public class FilterFileSystem extends FileSystem {
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
    */
+  @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     return fs.open(f, bufferSize);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     return fs.append(f, bufferSize, progress);
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -171,6 +176,7 @@ public class FilterFileSystem extends FileSystem {
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     return fs.setReplication(src, replication);
   }
@@ -179,23 +185,23 @@ public class FilterFileSystem extends FileSystem {
    * Renames Path src to Path dst.  Can take place on local fs
    * or remote DFS.
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     return fs.rename(src, dst);
   }
   
   /** Delete a file */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException {
     return fs.delete(f, recursive);
   }
   
   /** List files in a directory. */
+  @Override
   public FileStatus[] listStatus(Path f) throws IOException {
     return fs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -203,11 +209,13 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** List files and their block locations in a directory. */
+  @Override
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
   throws IOException {
     return fs.listLocatedStatus(f);
   }
   
+  @Override
   public Path getHomeDirectory() {
     return fs.getHomeDirectory();
   }
@@ -219,6 +227,7 @@ public class FilterFileSystem extends FileSystem {
    * 
    * @param newDir
    */
+  @Override
   public void setWorkingDirectory(Path newDir) {
     fs.setWorkingDirectory(newDir);
   }
@@ -228,21 +237,21 @@ public class FilterFileSystem extends FileSystem {
    * 
    * @return the directory pathname
    */
+  @Override
   public Path getWorkingDirectory() {
     return fs.getWorkingDirectory();
   }
   
+  @Override
   protected Path getInitialWorkingDirectory() {
     return fs.getInitialWorkingDirectory();
   }
   
-  /** {@inheritDoc} */
   @Override
   public FsStatus getStatus(Path p) throws IOException {
     return fs.getStatus(p);
   }
   
-  /** {@inheritDoc} */
   @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     return fs.mkdirs(f, permission);
@@ -254,6 +263,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyFromLocalFile(delSrc, src, dst);
@@ -264,6 +274,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite, 
                                 Path[] srcs, Path dst)
     throws IOException {
@@ -275,6 +286,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite, 
                                 Path src, Path dst)
     throws IOException {
@@ -286,6 +298,7 @@ public class FilterFileSystem extends FileSystem {
    * Copy it from FS control to the local dst name.
    * delSrc indicates if the src will be removed or not.
    */   
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyToLocalFile(delSrc, src, dst);
@@ -297,6 +310,7 @@ public class FilterFileSystem extends FileSystem {
    * file.  If the FS is local, we write directly into the target.  If
    * the FS is remote, we write into the tmp local area.
    */
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     return fs.startLocalOutput(fsOutputFile, tmpLocalFile);
@@ -308,12 +322,14 @@ public class FilterFileSystem extends FileSystem {
    * FS will copy the contents of tmpLocalFile to the correct target at
    * fsOutputFile.
    */
+  @Override
   public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     fs.completeLocalOutput(fsOutputFile, tmpLocalFile);
   }
 
   /** Return the total size of all files in the filesystem.*/
+  @Override
   public long getUsed() throws IOException{
     return fs.getUsed();
   }
@@ -357,16 +373,17 @@ public class FilterFileSystem extends FileSystem {
   /**
    * Get file status.
    */
+  @Override
   public FileStatus getFileStatus(Path f) throws IOException {
     return fs.getFileStatus(f);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FileChecksum getFileChecksum(Path f) throws IOException {
     return fs.getFileChecksum(f);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     fs.setVerifyChecksum(verifyChecksum);
   }
@@ -387,21 +404,18 @@ public class FilterFileSystem extends FileSystem {
     fs.close();
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setOwner(Path p, String username, String groupname
       ) throws IOException {
     fs.setOwner(p, username, groupname);
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setTimes(Path p, long mtime, long atime
       ) throws IOException {
     fs.setTimes(p, mtime, atime);
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setPermission(Path p, FsPermission permission
       ) throws IOException {
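
FilterFileSystem is a pure decorator: every operation forwards to the wrapped fs, and subclasses such as ChecksumFileSystem override only the calls they need to intercept. Annotating each forwarder with @Override means that if a FileSystem signature later changes, the build breaks here instead of silently leaving behind a stale overload. A stripped-down sketch of the pattern, with hypothetical names:

import java.io.IOException;

abstract class BaseStore {
  abstract String read(String path) throws IOException;
}

class FilterStore extends BaseStore {
  private final BaseStore inner;
  FilterStore(BaseStore inner) { this.inner = inner; }

  @Override
  String read(String path) throws IOException {
    return inner.read(path); // pure pass-through; subclasses override selectively
  }
}

class UpperStore extends FilterStore {
  UpperStore(BaseStore inner) { super(inner); }

  @Override
  String read(String path) throws IOException {
    return super.read(path).toUpperCase(); // decorate exactly one behavior
  }
}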

+ 0 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java

@@ -174,9 +174,6 @@ public abstract class FilterFs extends AbstractFileSystem {
     return myFs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java

@@ -39,6 +39,7 @@ public class FsServerDefaults implements Writable {
 
   static { // register a ctor
     WritableFactories.setFactory(FsServerDefaults.class, new WritableFactory() {
+      @Override
       public Writable newInstance() {
         return new FsServerDefaults();
       }
@@ -106,6 +107,7 @@ public class FsServerDefaults implements Writable {
   // /////////////////////////////////////////
   // Writable
   // /////////////////////////////////////////
+  @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
     out.writeLong(blockSize);
@@ -116,6 +118,7 @@ public class FsServerDefaults implements Writable {
     WritableUtils.writeEnum(out, checksumType);
   }
 
+  @Override
   @InterfaceAudience.Private
   public void readFields(DataInput in) throws IOException {
     blockSize = in.readLong();
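
FsServerDefaults follows the usual Writable contract: the serialized stream carries no field tags, so readFields() must consume values in exactly the order write() produced them, and the registered WritableFactory supplies the empty instance that readFields() then populates. A minimal sketch of that symmetry, with hypothetical fields:

import java.io.*;

class ServerDefaultsSketch {
  long blockSize;
  int bytesPerChecksum;

  void write(DataOutput out) throws IOException {
    out.writeLong(blockSize);        // field order is the wire format,
    out.writeInt(bytesPerChecksum);  // so it must mirror readFields()
  }

  void readFields(DataInput in) throws IOException {
    blockSize = in.readLong();
    bytesPerChecksum = in.readInt();
  }
}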

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java

@@ -236,6 +236,7 @@ public class FsShell extends Configured implements Tool {
   /**
   * Runs the FsShell command with the given arguments.
    */
+  @Override
   public int run(String argv[]) throws Exception {
     // initialize FsShell
     init();

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java

@@ -60,12 +60,14 @@ public class FsStatus implements Writable {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(capacity);
     out.writeLong(used);
     out.writeLong(remaining);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     capacity = in.readLong();
     used = in.readLong();

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java

@@ -53,7 +53,6 @@ class FsUrlConnection extends URLConnection {
     }
   }
 
-  /* @inheritDoc */
   @Override
   public InputStream getInputStream() throws IOException {
     if (is == null) {

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java

@@ -59,6 +59,7 @@ public class FsUrlStreamHandlerFactory implements
     this.handler = new FsUrlStreamHandler(this.conf);
   }
 
+  @Override
   public java.net.URLStreamHandler createURLStreamHandler(String protocol) {
     if (!protocols.containsKey(protocol)) {
       boolean known = true;

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Evolving
 public class GlobFilter implements PathFilter {
   private final static PathFilter DEFAULT_FILTER = new PathFilter() {
+      @Override
       public boolean accept(Path file) {
         return true;
       }
@@ -75,6 +76,7 @@ public class GlobFilter implements PathFilter {
     return pattern.hasWildcard();
   }
 
+  @Override
   public boolean accept(Path path) {
     return pattern.matches(path.getName()) && userFilter.accept(path);
   }
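
GlobFilter accepts a path only when both the compiled glob pattern and the user-supplied PathFilter accept it, with an accept-everything default standing in when no user filter is given. A small sketch of that conjunction, using java.util.regex in place of Hadoop's glob matcher:

import java.util.function.Predicate;
import java.util.regex.Pattern;

class GlobFilterSketch {
  // Combine a pattern match with a caller-supplied filter.
  static Predicate<String> globAnd(Pattern pattern, Predicate<String> user) {
    return name -> pattern.matcher(name).matches() && user.test(name);
  }

  public static void main(String[] args) {
    Predicate<String> f =
        globAnd(Pattern.compile(".*\\.log"), n -> !n.startsWith("_"));
    System.out.println(f.test("app.log"));  // true: both filters pass
    System.out.println(f.test("_tmp.log")); // false: user filter rejects
  }
}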

+ 26 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

@@ -106,6 +106,7 @@ public class HarFileSystem extends FilterFileSystem {
   * har:///archivepath. This assumes the default underlying filesystem
   * when one is not specified.
    */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     // decode the name
     URI underLyingURI = decodeHarURI(name, conf);
@@ -247,6 +248,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * return the top level archive.
    */
+  @Override
   public Path getWorkingDirectory() {
     return new Path(uri.toString());
   }
@@ -636,6 +638,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * @return null since no checksum algorithm is implemented.
    */
+  @Override
   public FileChecksum getFileChecksum(Path f) {
     return null;
   }
@@ -668,6 +671,7 @@ public class HarFileSystem extends FilterFileSystem {
     throw new IOException("Har: Create not allowed");
   }
   
+  @Override
   public FSDataOutputStream create(Path f,
       FsPermission permission,
       boolean overwrite,
@@ -735,10 +739,12 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * return the top level archive path.
    */
+  @Override
   public Path getHomeDirectory() {
     return new Path(uri.toString());
   }
   
+  @Override
   public void setWorkingDirectory(Path newDir) {
     //does nothing.
   }
@@ -746,6 +752,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     throw new IOException("Har: mkdirs not allowed");
   }
@@ -753,6 +760,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws 
         IOException {
     throw new IOException("Har: copyfromlocalfile not allowed");
@@ -761,6 +769,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * copies the file in the har filesystem to a local file.
    */
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst) 
     throws IOException {
     FileUtil.copy(this, src, getLocal(getConf()), dst, false, getConf());
@@ -769,6 +778,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) 
     throws IOException {
     throw new IOException("Har: startLocalOutput not allowed");
@@ -777,6 +787,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile) 
     throws IOException {
     throw new IOException("Har: completeLocalOutput not allowed");
@@ -785,6 +796,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public void setOwner(Path p, String username, String groupname)
     throws IOException {
     throw new IOException("Har: setowner not allowed");
@@ -793,6 +805,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * Not implemented.
    */
+  @Override
   public void setPermission(Path p, FsPermission permisssion) 
     throws IOException {
     throw new IOException("Har: setPermission not allowed");
@@ -825,6 +838,7 @@ public class HarFileSystem extends FilterFileSystem {
         this.end = start + length;
       }
       
+      @Override
       public synchronized int available() throws IOException {
         long remaining = end - underLyingStream.getPos();
         if (remaining > (long)Integer.MAX_VALUE) {
@@ -833,6 +847,7 @@ public class HarFileSystem extends FilterFileSystem {
         return (int) remaining;
       }
       
+      @Override
       public synchronized  void close() throws IOException {
         underLyingStream.close();
         super.close();
@@ -847,15 +862,18 @@ public class HarFileSystem extends FilterFileSystem {
       /**
        * reset is not implemented
        */
+      @Override
       public void reset() throws IOException {
         throw new IOException("reset not implemented.");
       }
       
+      @Override
       public synchronized int read() throws IOException {
         int ret = read(oneBytebuff, 0, 1);
         return (ret <= 0) ? -1: (oneBytebuff[0] & 0xff);
       }
       
+      @Override
       public synchronized int read(byte[] b) throws IOException {
         int ret = read(b, 0, b.length);
         if (ret != -1) {
@@ -867,6 +885,7 @@ public class HarFileSystem extends FilterFileSystem {
       /**
        * 
        */
+      @Override
       public synchronized int read(byte[] b, int offset, int len) 
         throws IOException {
         int newlen = len;
@@ -882,6 +901,7 @@ public class HarFileSystem extends FilterFileSystem {
         return ret;
       }
       
+      @Override
       public synchronized long skip(long n) throws IOException {
         long tmpN = n;
         if (tmpN > 0) {
@@ -895,10 +915,12 @@ public class HarFileSystem extends FilterFileSystem {
         return (tmpN < 0)? -1 : 0;
       }
       
+      @Override
       public synchronized long getPos() throws IOException {
         return (position - start);
       }
       
+      @Override
       public synchronized void seek(long pos) throws IOException {
         if (pos < 0 || (start + pos > end)) {
           throw new IOException("Failed to seek: EOF");
@@ -907,6 +929,7 @@ public class HarFileSystem extends FilterFileSystem {
         underLyingStream.seek(position);
       }
 
+      @Override
       public boolean seekToNewSource(long targetPos) throws IOException {
         //do not need to implement this
         // hdfs in itself does seektonewsource 
@@ -917,6 +940,7 @@ public class HarFileSystem extends FilterFileSystem {
       /**
       * Implements positioned read.
        */
+      @Override
       public int read(long pos, byte[] b, int offset, int length) 
       throws IOException {
         int nlength = length;
@@ -929,6 +953,7 @@ public class HarFileSystem extends FilterFileSystem {
       /**
       * Positioned read that fully fills the requested range.
        */
+      @Override
       public void readFully(long pos, byte[] b, int offset, int length) 
       throws IOException {
         if (start + length + pos > end) {
@@ -937,6 +962,7 @@ public class HarFileSystem extends FilterFileSystem {
         underLyingStream.readFully(pos + start, b, offset, length);
       }
       
+      @Override
       public void readFully(long pos, byte[] b) throws IOException {
           readFully(pos, b, 0, b.length);
       }
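
The stream methods overridden above implement a bounded window over a single archive part file: positions are exposed relative to start, available() clamps the remaining range to an int, and seeks past end fail instead of wandering into a neighboring entry. A sketch of the bookkeeping, with hypothetical names:

import java.io.IOException;

class BoundedWindow {
  private final long start, end;
  private long pos; // absolute position in the underlying stream

  BoundedWindow(long start, long length) {
    this.start = start; this.end = start + length; this.pos = start;
  }

  long getPos() { return pos - start; } // logical, entry-relative position

  void seek(long logicalPos) throws IOException {
    if (logicalPos < 0 || start + logicalPos > end) {
      throw new IOException("Failed to seek: EOF");
    }
    pos = start + logicalPos;
  }

  int available() {
    long remaining = end - pos;
    return remaining > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) remaining;
  }
}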

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java

@@ -91,6 +91,7 @@ public class LocalFileSystem extends ChecksumFileSystem {
    * Moves files to a bad file directory on the same device, so that their
    * storage will not be reused.
    */
+  @Override
   public boolean reportChecksumFailure(Path p, FSDataInputStream in,
                                        long inPos,
                                        FSDataInputStream sums, long sumsPos) {

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java

@@ -94,6 +94,7 @@ public class LocatedFileStatus extends FileStatus {
   * @throws ClassCastException if the specified object is not of 
    *         type FileStatus
    */
+  @Override
   public int compareTo(Object o) {
     return super.compareTo(o);
   }
@@ -102,6 +103,7 @@ public class LocatedFileStatus extends FileStatus {
    * @param   o the object to be compared.
   * @return  true if the two file statuses have the same path name; false if not.
    */
+  @Override
   public boolean equals(Object o) {
     return super.equals(o);
   }
@@ -112,6 +114,7 @@ public class LocatedFileStatus extends FileStatus {
    *
    * @return  a hash code value for the path name.
    */
+  @Override
   public int hashCode() {
     return super.hashCode();
   }

+ 10 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java

@@ -57,7 +57,7 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
     this.md5 = md5;
   }
   
-  /** {@inheritDoc} */ 
+  @Override
   public String getAlgorithmName() {
     return "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC +
         getCrcType().name();
@@ -73,11 +73,11 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
 
     throw new IOException("Unknown checksum type in " + algorithm);
   }
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
   public int getLength() {return LENGTH;}
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
   public byte[] getBytes() {
     return WritableUtils.toByteArray(this);
   }
@@ -92,14 +92,14 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
     return new ChecksumOpt(getCrcType(), bytesPerCRC);
   }
 
-  /** {@inheritDoc} */ 
+  @Override
   public void readFields(DataInput in) throws IOException {
     bytesPerCRC = in.readInt();
     crcPerBlock = in.readLong();
     md5 = MD5Hash.read(in);
   }
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(bytesPerCRC);
     out.writeLong(crcPerBlock);
@@ -161,8 +161,8 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
           + ", md5=" + md5, e);
     }
   }
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
   public String toString() {
     return getAlgorithmName() + ":" + md5;
   }
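
getBytes() above delegates to WritableUtils.toByteArray(this), which just runs the checksum's own write() into an in-memory buffer. A sketch of that serialization, keeping the same field order as write() and readFields():

import java.io.*;

class ChecksumBytesSketch {
  int bytesPerCRC = 512;
  long crcPerBlock = 128;

  byte[] getBytes() throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    out.writeInt(bytesPerCRC);   // mirrors write(DataOutput)
    out.writeLong(crcPerBlock);
    out.flush();
    return buf.toByteArray();    // the md5 bytes would follow in the real class
  }
}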

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java

@@ -22,7 +22,6 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.HadoopIllegalArgumentException;
 
 /**
  * This class contains options related to file system operations.

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java

@@ -261,6 +261,7 @@ public class Path implements Comparable {
     return new Path(getParent(), getName()+suffix);
   }
 
+  @Override
   public String toString() {
     // we can't use uri.toString(), which escapes everything, because we want
     // illegal characters unescaped in the string, for glob processing, etc.
@@ -289,6 +290,7 @@ public class Path implements Comparable {
     return buffer.toString();
   }
 
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof Path)) {
       return false;
@@ -297,10 +299,12 @@ public class Path implements Comparable {
     return this.uri.equals(that.uri);
   }
 
+  @Override
   public int hashCode() {
     return uri.hashCode();
   }
 
+  @Override
   public int compareTo(Object o) {
     Path that = (Path)o;
     return this.uri.compareTo(that.uri);

+ 30 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -72,8 +72,10 @@ public class RawLocalFileSystem extends FileSystem {
     return new File(path.toUri().getPath());
   }
 
+  @Override
   public URI getUri() { return NAME; }
   
+  @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     super.initialize(uri, conf);
     setConf(conf);
@@ -84,6 +86,7 @@ public class RawLocalFileSystem extends FileSystem {
       super(f);
     }
     
+    @Override
     public int read() throws IOException {
       int result = super.read();
       if (result != -1) {
@@ -92,6 +95,7 @@ public class RawLocalFileSystem extends FileSystem {
       return result;
     }
     
+    @Override
     public int read(byte[] data) throws IOException {
       int result = super.read(data);
       if (result != -1) {
@@ -100,6 +104,7 @@ public class RawLocalFileSystem extends FileSystem {
       return result;
     }
     
+    @Override
     public int read(byte[] data, int offset, int length) throws IOException {
       int result = super.read(data, offset, length);
       if (result != -1) {
@@ -120,15 +125,18 @@ public class RawLocalFileSystem extends FileSystem {
       this.fis = new TrackingFileInputStream(pathToFile(f));
     }
     
+    @Override
     public void seek(long pos) throws IOException {
       fis.getChannel().position(pos);
       this.position = pos;
     }
     
+    @Override
     public long getPos() throws IOException {
       return this.position;
     }
     
+    @Override
     public boolean seekToNewSource(long targetPos) throws IOException {
       return false;
     }
@@ -136,11 +144,14 @@ public class RawLocalFileSystem extends FileSystem {
     /*
      * Just forward to the fis
      */
+    @Override
     public int available() throws IOException { return fis.available(); }
+    @Override
     public void close() throws IOException { fis.close(); }
     @Override
     public boolean markSupported() { return false; }
     
+    @Override
     public int read() throws IOException {
       try {
         int value = fis.read();
@@ -153,6 +164,7 @@ public class RawLocalFileSystem extends FileSystem {
       }
     }
     
+    @Override
     public int read(byte[] b, int off, int len) throws IOException {
       try {
         int value = fis.read(b, off, len);
@@ -165,6 +177,7 @@ public class RawLocalFileSystem extends FileSystem {
       }
     }
     
+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException {
       ByteBuffer bb = ByteBuffer.wrap(b, off, len);
@@ -175,6 +188,7 @@ public class RawLocalFileSystem extends FileSystem {
       }
     }
     
+    @Override
     public long skip(long n) throws IOException {
       long value = fis.skip(n);
       if (value > 0) {
@@ -189,6 +203,7 @@ public class RawLocalFileSystem extends FileSystem {
     }
   }
   
+  @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     if (!exists(f)) {
       throw new FileNotFoundException(f.toString());
@@ -210,8 +225,11 @@ public class RawLocalFileSystem extends FileSystem {
     /*
      * Just forward to the fos
      */
+    @Override
     public void close() throws IOException { fos.close(); }
+    @Override
     public void flush() throws IOException { fos.flush(); }
+    @Override
     public void write(byte[] b, int off, int len) throws IOException {
       try {
         fos.write(b, off, len);
@@ -220,6 +238,7 @@ public class RawLocalFileSystem extends FileSystem {
       }
     }
     
+    @Override
     public void write(int b) throws IOException {
       try {
         fos.write(b);
@@ -229,7 +248,7 @@ public class RawLocalFileSystem extends FileSystem {
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     if (!exists(f)) {
@@ -242,7 +261,6 @@ public class RawLocalFileSystem extends FileSystem {
         new LocalFSFileOutputStream(f, true), bufferSize), statistics);
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
     short replication, long blockSize, Progressable progress)
@@ -264,7 +282,6 @@ public class RawLocalFileSystem extends FileSystem {
         new LocalFSFileOutputStream(f, false), bufferSize), statistics);
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
     boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -276,7 +293,6 @@ public class RawLocalFileSystem extends FileSystem {
     return out;
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
       boolean overwrite,
@@ -288,6 +304,7 @@ public class RawLocalFileSystem extends FileSystem {
     return out;
   }
 
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     if (pathToFile(src).renameTo(pathToFile(dst))) {
       return true;
@@ -302,6 +319,7 @@ public class RawLocalFileSystem extends FileSystem {
    * @return true if the file or directory and all its contents were deleted
    * @throws IOException if p is non-empty and recursive is false 
    */
+  @Override
   public boolean delete(Path p, boolean recursive) throws IOException {
     File f = pathToFile(p);
     if (f.isFile()) {
@@ -319,6 +337,7 @@ public class RawLocalFileSystem extends FileSystem {
    * (<b>Note</b>: Returned list is not sorted in any given order,
    * due to reliance on Java's {@link File#list()} API.)
    */
+  @Override
   public FileStatus[] listStatus(Path f) throws IOException {
     File localf = pathToFile(f);
     FileStatus[] results;
@@ -356,6 +375,7 @@ public class RawLocalFileSystem extends FileSystem {
    * Creates the specified directory hierarchy. Does not
    * treat existence as an error.
    */
+  @Override
   public boolean mkdirs(Path f) throws IOException {
     if(f == null) {
       throw new IllegalArgumentException("mkdirs path arg is null");
@@ -373,7 +393,6 @@ public class RawLocalFileSystem extends FileSystem {
       (p2f.mkdir() || p2f.isDirectory());
   }
 
-  /** {@inheritDoc} */
   @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     boolean b = mkdirs(f);
@@ -418,7 +437,6 @@ public class RawLocalFileSystem extends FileSystem {
     return this.makeQualified(new Path(System.getProperty("user.dir")));
   }
 
-  /** {@inheritDoc} */
   @Override
   public FsStatus getStatus(Path p) throws IOException {
     File partition = pathToFile(p == null ? new Path("/") : p);
@@ -430,29 +448,35 @@ public class RawLocalFileSystem extends FileSystem {
   }
   
   // In the case of the local filesystem, we can just rename the file.
+  @Override
   public void moveFromLocalFile(Path src, Path dst) throws IOException {
     rename(src, dst);
   }
   
   // We can write output directly to the final location
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     return fsOutputFile;
   }
   
   // It's in the right place - nothing to do.
+  @Override
   public void completeLocalOutput(Path fsWorkingFile, Path tmpLocalFile)
     throws IOException {
   }
   
+  @Override
   public void close() throws IOException {
     super.close();
   }
   
+  @Override
   public String toString() {
     return "LocalFS";
   }
   
+  @Override
   public FileStatus getFileStatus(Path f) throws IOException {
     File path = pathToFile(f);
     if (path.exists()) {
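
TrackingFileInputStream exists so RawLocalFileSystem can feed byte counts into FileSystem.Statistics; each read overload bumps the counter only on a successful read, which is why all three are overridden. A self-contained sketch of the idiom (the counter wiring is hypothetical):

import java.io.*;
import java.util.concurrent.atomic.AtomicLong;

class CountingInputStream extends FilterInputStream {
  private final AtomicLong bytesRead;

  CountingInputStream(InputStream in, AtomicLong bytesRead) {
    super(in);
    this.bytesRead = bytesRead;
  }

  @Override
  public int read() throws IOException {
    int b = super.read();
    if (b != -1) bytesRead.incrementAndGet(); // count only successful reads
    return b;
  }

  // FilterInputStream.read(byte[]) delegates here, so one override suffices.
  @Override
  public int read(byte[] buf, int off, int len) throws IOException {
    int n = super.read(buf, off, len);
    if (n > 0) bytesRead.addAndGet(n);
    return n;
  }
}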

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

@@ -263,6 +263,7 @@ public class TrashPolicyDefault extends TrashPolicy {
       }
     }
 
+    @Override
     public void run() {
       if (emptierInterval == 0)
         return;                                   // trash disabled

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java

@@ -262,6 +262,7 @@ public class FTPFileSystem extends FileSystem {
   }
 
   /** This optional operation is not yet supported. */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");

+ 9 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java

@@ -51,19 +51,23 @@ public class FTPInputStream extends FSInputStream {
     this.closed = false;
   }
 
+  @Override
   public long getPos() throws IOException {
     return pos;
   }
 
   // We don't support seek.
+  @Override
   public void seek(long pos) throws IOException {
     throw new IOException("Seek not supported");
   }
 
+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     throw new IOException("Seek not supported");
   }
 
+  @Override
   public synchronized int read() throws IOException {
     if (closed) {
       throw new IOException("Stream closed");
@@ -79,6 +83,7 @@ public class FTPInputStream extends FSInputStream {
     return byteRead;
   }
 
+  @Override
   public synchronized int read(byte buf[], int off, int len) throws IOException {
     if (closed) {
       throw new IOException("Stream closed");
@@ -95,6 +100,7 @@ public class FTPInputStream extends FSInputStream {
     return result;
   }
 
+  @Override
   public synchronized void close() throws IOException {
     if (closed) {
       throw new IOException("Stream closed");
@@ -116,14 +122,17 @@ public class FTPInputStream extends FSInputStream {
 
   // Not supported.
 
+  @Override
   public boolean markSupported() {
     return false;
   }
 
+  @Override
   public void mark(int readLimit) {
     // Do nothing
   }
 
+  @Override
   public void reset() throws IOException {
     throw new IOException("Mark not supported");
   }

+ 17 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java

@@ -50,22 +50,27 @@ class KFSImpl implements IFSImpl {
         statistics = stats;
     }
 
+    @Override
     public boolean exists(String path) throws IOException {
         return kfsAccess.kfs_exists(path);
     }
 
+    @Override
     public boolean isDirectory(String path) throws IOException {
         return kfsAccess.kfs_isDirectory(path);
     }
 
+    @Override
     public boolean isFile(String path) throws IOException {
         return kfsAccess.kfs_isFile(path);
     }
 
+    @Override
     public String[] readdir(String path) throws IOException {
         return kfsAccess.kfs_readdir(path);
     }
 
+    @Override
     public FileStatus[] readdirplus(Path path) throws IOException {
         String srep = path.toUri().getPath();
         KfsFileAttr[] fattr = kfsAccess.kfs_readdirplus(srep);
@@ -100,52 +105,64 @@ class KFSImpl implements IFSImpl {
     }
 
 
+    @Override
     public int mkdirs(String path) throws IOException {
         return kfsAccess.kfs_mkdirs(path);
     }
 
+    @Override
     public int rename(String source, String dest) throws IOException {
         return kfsAccess.kfs_rename(source, dest);
     }
 
+    @Override
     public int rmdir(String path) throws IOException {
         return kfsAccess.kfs_rmdir(path);
     }
 
+    @Override
     public int remove(String path) throws IOException {
         return kfsAccess.kfs_remove(path);
     }
 
+    @Override
     public long filesize(String path) throws IOException {
         return kfsAccess.kfs_filesize(path);
     }
 
+    @Override
     public short getReplication(String path) throws IOException {
         return kfsAccess.kfs_getReplication(path);
     }
 
+    @Override
     public short setReplication(String path, short replication) throws IOException {
         return kfsAccess.kfs_setReplication(path, replication);
     }
 
+    @Override
     public String[][] getDataLocation(String path, long start, long len) throws IOException {
         return kfsAccess.kfs_getDataLocation(path, start, len);
     }
 
+    @Override
     public long getModificationTime(String path) throws IOException {
         return kfsAccess.kfs_getModificationTime(path);
     }
 
+    @Override
     public FSDataInputStream open(String path, int bufferSize) throws IOException {
         return new FSDataInputStream(new KFSInputStream(kfsAccess, path, 
                                                         statistics));
     }
 
+    @Override
     public FSDataOutputStream create(String path, short replication, int bufferSize, Progressable progress) throws IOException {
         return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, replication, false, progress), 
                                       statistics);
     }
 
+    @Override
     public FSDataOutputStream append(String path, int bufferSize, Progressable progress) throws IOException {
         // when opening for append, # of replicas is ignored
         return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, (short) 1, true, progress), 

+ 10 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java

@@ -53,6 +53,7 @@ class KFSInputStream extends FSInputStream {
             this.fsize = 0;
     }
 
+    @Override
     public long getPos() throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -60,6 +61,7 @@ class KFSInputStream extends FSInputStream {
         return kfsChannel.tell();
     }
 
+    @Override
     public synchronized int available() throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -67,6 +69,7 @@ class KFSInputStream extends FSInputStream {
         return (int) (this.fsize - getPos());
     }
 
+    @Override
     public synchronized void seek(long targetPos) throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -74,10 +77,12 @@ class KFSInputStream extends FSInputStream {
         kfsChannel.seek(targetPos);
     }
 
+    @Override
     public synchronized boolean seekToNewSource(long targetPos) throws IOException {
         return false;
     }
 
+    @Override
     public synchronized int read() throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -93,6 +98,7 @@ class KFSInputStream extends FSInputStream {
         return -1;
     }
 
+    @Override
     public synchronized int read(byte b[], int off, int len) throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -109,6 +115,7 @@ class KFSInputStream extends FSInputStream {
 	return res;
     }
 
+    @Override
     public synchronized void close() throws IOException {
         if (kfsChannel == null) {
             return;
@@ -118,14 +125,17 @@ class KFSInputStream extends FSInputStream {
         kfsChannel = null;
     }
 
+    @Override
     public boolean markSupported() {
         return false;
     }
 
+    @Override
     public void mark(int readLimit) {
         // Do nothing
     }
 
+    @Override
     public void reset() throws IOException {
         throw new IOException("Mark not supported");
     }

+ 4 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java

@@ -20,15 +20,10 @@
 package org.apache.hadoop.fs.kfs;
 
 import java.io.*;
-import java.net.*;
-import java.util.*;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.util.Progressable;
 
 import org.kosmix.kosmosfs.access.KfsAccess;
@@ -60,6 +55,7 @@ class KFSOutputStream extends OutputStream {
         return kfsChannel.tell();
     }
 
+    @Override
     public void write(int v) throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -70,6 +66,7 @@ class KFSOutputStream extends OutputStream {
         write(b, 0, 1);
     }
 
+    @Override
     public void write(byte b[], int off, int len) throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -80,6 +77,7 @@ class KFSOutputStream extends OutputStream {
         kfsChannel.write(ByteBuffer.wrap(b, off, len));
     }
 
+    @Override
     public void flush() throws IOException {
         if (kfsChannel == null) {
             throw new IOException("File closed");
@@ -89,6 +87,7 @@ class KFSOutputStream extends OutputStream {
         kfsChannel.sync();
     }
 
+    @Override
     public synchronized void close() throws IOException {
         if (kfsChannel == null) {
             return;

+ 8 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java

@@ -40,6 +40,7 @@ public class FsPermission implements Writable {
   private static final Log LOG = LogFactory.getLog(FsPermission.class);
 
   static final WritableFactory FACTORY = new WritableFactory() {
+    @Override
     public Writable newInstance() { return new FsPermission(); }
   };
   static {                                      // register a ctor
@@ -124,12 +125,12 @@ public class FsPermission implements Writable {
     set(v[(n >>> 6) & 7], v[(n >>> 3) & 7], v[n & 7], (((n >>> 9) & 1) == 1) );
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeShort(toShort());
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void readFields(DataInput in) throws IOException {
     fromShort(in.readShort());
   }
@@ -155,7 +156,7 @@ public class FsPermission implements Writable {
     return (short)s;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean equals(Object obj) {
     if (obj instanceof FsPermission) {
       FsPermission that = (FsPermission)obj;
@@ -167,10 +168,10 @@ public class FsPermission implements Writable {
     return false;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {return toShort();}
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     String str = useraction.SYMBOL + groupaction.SYMBOL + otheraction.SYMBOL;
     if(stickyBit) {
@@ -300,9 +301,11 @@ public class FsPermission implements Writable {
     public ImmutableFsPermission(short permission) {
       super(permission);
     }
+    @Override
     public FsPermission applyUMask(FsPermission umask) {
       throw new UnsupportedOperationException();
     }
+    @Override
     public void readFields(DataInput in) throws IOException {
       throw new UnsupportedOperationException();
     }    
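
ImmutableFsPermission inherits all of FsPermission's read paths but turns the mutating entry points (applyUMask, readFields) into UnsupportedOperationException, so shared permission constants cannot be altered in place. A compact sketch of the idiom, with hypothetical names:

class Setting {
  private short bits;
  Setting(short bits) { this.bits = bits; }

  void fromShort(short bits) { this.bits = bits; } // the mutator
  short toShort() { return bits; }

  static final class Immutable extends Setting {
    Immutable(short bits) { super(bits); }

    @Override
    void fromShort(short bits) { // disabled in the immutable view
      throw new UnsupportedOperationException("immutable instance");
    }
  }
}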

+ 6 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java

@@ -32,6 +32,7 @@ import java.io.IOException;
 @InterfaceStability.Unstable
 public class PermissionStatus implements Writable {
   static final WritableFactory FACTORY = new WritableFactory() {
+    @Override
     public Writable newInstance() { return new PermissionStatus(); }
   };
   static {                                      // register a ctor
@@ -42,9 +43,11 @@ public class PermissionStatus implements Writable {
   public static PermissionStatus createImmutable(
       String user, String group, FsPermission permission) {
     return new PermissionStatus(user, group, permission) {
+      @Override
       public PermissionStatus applyUMask(FsPermission umask) {
         throw new UnsupportedOperationException();
       }
+      @Override
       public void readFields(DataInput in) throws IOException {
         throw new UnsupportedOperationException();
       }
@@ -82,14 +85,14 @@ public class PermissionStatus implements Writable {
     return this;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void readFields(DataInput in) throws IOException {
     username = Text.readString(in, Text.DEFAULT_MAX_LEN);
     groupname = Text.readString(in, Text.DEFAULT_MAX_LEN);
     permission = FsPermission.read(in);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     write(out, username, groupname, permission);
   }
@@ -115,7 +118,7 @@ public class PermissionStatus implements Writable {
     permission.write(out);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     return username + ":" + groupname + ":" + permission;
   }

+ 14 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java

@@ -83,6 +83,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
   private static final Log LOG = 
     LogFactory.getLog(Jets3tFileSystemStore.class.getName());
   
+  @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     
     this.conf = conf;
@@ -108,6 +109,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
 		      );
   }
 
+  @Override
   public String getVersion() throws IOException {
     return FILE_SYSTEM_VERSION_VALUE;
   }
@@ -123,14 +125,17 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public void deleteINode(Path path) throws IOException {
     delete(pathToKey(path));
   }
 
+  @Override
   public void deleteBlock(Block block) throws IOException {
     delete(blockToKey(block));
   }
 
+  @Override
   public boolean inodeExists(Path path) throws IOException {
     InputStream in = get(pathToKey(path), true);
     if (in == null) {
@@ -140,6 +145,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     return true;
   }
   
+  @Override
   public boolean blockExists(long blockId) throws IOException {
     InputStream in = get(blockToKey(blockId), false);
     if (in == null) {
@@ -203,10 +209,12 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public INode retrieveINode(Path path) throws IOException {
     return INode.deserialize(get(pathToKey(path), true));
   }
 
+  @Override
   public File retrieveBlock(Block block, long byteRangeStart)
     throws IOException {
     File fileBlock = null;
@@ -249,6 +257,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     return result;
   }
 
+  @Override
   public Set<Path> listSubPaths(Path path) throws IOException {
     try {
       String prefix = pathToKey(path);
@@ -270,6 +279,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
   
+  @Override
   public Set<Path> listDeepSubPaths(Path path) throws IOException {
     try {
       String prefix = pathToKey(path);
@@ -311,10 +321,12 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public void storeINode(Path path, INode inode) throws IOException {
     put(pathToKey(path), inode.serialize(), inode.getSerializedLength(), true);
   }
 
+  @Override
   public void storeBlock(Block block, File file) throws IOException {
     BufferedInputStream in = null;
     try {
@@ -354,6 +366,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     return blockToKey(block.getId());
   }
 
+  @Override
   public void purge() throws IOException {
     try {
       S3Object[] objects = s3Service.listObjects(bucket);
@@ -368,6 +381,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public void dump() throws IOException {
     StringBuilder sb = new StringBuilder("S3 Filesystem, ");
     sb.append(bucket.getName()).append("\n");

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java

@@ -61,6 +61,7 @@ public class MigrationTool extends Configured implements Tool {
     System.exit(res);
   }
   
+  @Override
   public int run(String[] args) throws Exception {
     
     if (args.length == 0) {
@@ -195,6 +196,7 @@ public class MigrationTool extends Configured implements Tool {
   
   class UnversionedStore implements Store {
 
+    @Override
     public Set<Path> listAllPaths() throws IOException {
       try {
         String prefix = urlEncode(Path.SEPARATOR);
@@ -212,6 +214,7 @@ public class MigrationTool extends Configured implements Tool {
       }   
     }
 
+    @Override
     public void deleteINode(Path path) throws IOException {
       delete(pathToKey(path));
     }
@@ -227,6 +230,7 @@ public class MigrationTool extends Configured implements Tool {
       }
     }
     
+    @Override
     public INode retrieveINode(Path path) throws IOException {
       return INode.deserialize(get(pathToKey(path)));
     }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java

@@ -206,6 +206,7 @@ public class S3FileSystem extends FileSystem {
   }
 
   /** This optional operation is not yet supported. */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");
@@ -298,6 +299,7 @@ public class S3FileSystem extends FileSystem {
     return true;
   }
 
+  @Override
   public boolean delete(Path path, boolean recursive) throws IOException {
    Path absolutePath = makeAbsolute(path);
    INode inode = store.retrieveINode(absolutePath);

+ 12 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java

@@ -49,6 +49,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
   private S3Service s3Service;
   private S3Bucket bucket;
   
+  @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     S3Credentials s3Credentials = new S3Credentials();
     s3Credentials.initialize(uri, conf);
@@ -63,6 +64,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     bucket = new S3Bucket(uri.getHost());
   }
   
+  @Override
   public void storeFile(String key, File file, byte[] md5Hash)
     throws IOException {
     
@@ -90,6 +92,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
 
+  @Override
   public void storeEmptyFile(String key) throws IOException {
     try {
       S3Object object = new S3Object(key);
@@ -102,6 +105,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
   
+  @Override
   public FileMetadata retrieveMetadata(String key) throws IOException {
     try {
       S3Object object = s3Service.getObjectDetails(bucket, key);
@@ -117,6 +121,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
   
+  @Override
   public InputStream retrieve(String key) throws IOException {
     try {
       S3Object object = s3Service.getObject(bucket, key);
@@ -127,6 +132,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
   
+  @Override
   public InputStream retrieve(String key, long byteRangeStart)
     throws IOException {
     try {
@@ -139,11 +145,13 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
 
+  @Override
   public PartialListing list(String prefix, int maxListingLength)
     throws IOException {
     return list(prefix, maxListingLength, null, false);
   }
   
+  @Override
   public PartialListing list(String prefix, int maxListingLength, String priorLastKey,
       boolean recurse) throws IOException {
 
@@ -175,6 +183,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
 
+  @Override
   public void delete(String key) throws IOException {
     try {
       s3Service.deleteObject(bucket, key);
@@ -183,6 +192,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
   
+  @Override
   public void copy(String srcKey, String dstKey) throws IOException {
     try {
       s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
@@ -192,6 +202,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
 
+  @Override
   public void purge(String prefix) throws IOException {
     try {
       S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
@@ -203,6 +214,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     }
   }
 
+  @Override
   public void dump() throws IOException {
     StringBuilder sb = new StringBuilder("S3 Native Filesystem, ");
     sb.append(bucket.getName()).append("\n");

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java

@@ -150,6 +150,7 @@ public class CommandFormat {
       actual = got;
     }
 
+    @Override
     public String getMessage() {
       return "expected " + expected + " but got " + actual;
     }
@@ -165,6 +166,7 @@ public class CommandFormat {
       super(expected, actual);
     }
 
+    @Override
     public String getMessage() {
       return "Too many arguments: " + super.getMessage();
     }
@@ -180,6 +182,7 @@ public class CommandFormat {
       super(expected, actual);
     }
 
+    @Override
     public String getMessage() {
       return "Not enough arguments: " + super.getMessage();
     }
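
The CommandFormat exceptions layer their messages: each subclass prefixes its own context onto super.getMessage(), so the expected/actual wording lives in one place. A sketch of the idiom, with hypothetical names:

class ArgCountException extends IllegalArgumentException {
  final int expected, actual;

  ArgCountException(int expected, int actual) {
    this.expected = expected;
    this.actual = actual;
  }

  @Override
  public String getMessage() {
    return "expected " + expected + " but got " + actual;
  }
}

class TooManyArgsException extends ArgCountException {
  TooManyArgsException(int expected, int actual) { super(expected, actual); }

  @Override
  public String getMessage() {
    return "Too many arguments: " + super.getMessage(); // layered message
  }
}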

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java

@@ -114,6 +114,7 @@ class Delete {
   static class Rmr extends Rm {
     public static final String NAME = "rmr";
     
+    @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
       args.addFirst("-r");
       super.processOptions(args);
@@ -136,6 +137,7 @@ class Delete {
     
     private boolean ignoreNonEmpty = false;
     
+    @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
       CommandFormat cf = new CommandFormat(
           1, Integer.MAX_VALUE, "-ignore-fail-on-non-empty");

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java

@@ -161,6 +161,7 @@ class Display extends FsCommand {
       outbuf = new DataOutputBuffer();
     }
 
+    @Override
     public int read() throws IOException {
       int ret;
       if (null == inbuf || -1 == (ret = inbuf.read())) {
@@ -180,6 +181,7 @@ class Display extends FsCommand {
       return ret;
     }
 
+    @Override
     public void close() throws IOException {
       r.close();
       super.close();

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java

@@ -73,6 +73,7 @@ abstract public class FsCommand extends Command {
   
  // abstract method that is normally invoked by runall(), which is
  // overridden below
+  @Override
   protected void run(Path path) throws IOException {
     throw new RuntimeException("not supposed to get here");
   }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

@@ -380,6 +380,7 @@ public class PathData implements Comparable<PathData> {
    * as given on the commandline, or the full path
    * @return String of the path
    */
+  @Override
   public String toString() {
     String scheme = uri.getScheme();
     // No interpretation of symbols. Just decode % escaped chars.

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java

@@ -102,6 +102,7 @@ class ChRootedFileSystem extends FilterFileSystem {
    *   for this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(final URI name, final Configuration conf)
       throws IOException {
     super.initialize(name, conf);

+ 0 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java

@@ -20,10 +20,6 @@ package org.apache.hadoop.fs.viewfs;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-
 import org.apache.hadoop.fs.Path;
 
 /**

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

@@ -164,6 +164,7 @@ public class ViewFileSystem extends FileSystem {
    *          this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(final URI theUri, final Configuration conf)
       throws IOException {
     super.initialize(theUri, conf);

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java

@@ -42,7 +42,8 @@ class ViewFsFileStatus extends FileStatus {
      return super.equals(o);
    }
    
-   public int hashCode() {
+   @Override
+  public int hashCode() {
      return super.hashCode();
    }
    

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java

@@ -892,6 +892,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
       final List<ACL> acl, final CreateMode mode)
       throws InterruptedException, KeeperException {
     return zkDoWithRetries(new ZKAction<String>() {
+      @Override
       public String run() throws KeeperException, InterruptedException {
         return zkClient.create(path, data, acl, mode);
       }
@@ -901,6 +902,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   private byte[] getDataWithRetries(final String path, final boolean watch,
       final Stat stat) throws InterruptedException, KeeperException {
     return zkDoWithRetries(new ZKAction<byte[]>() {
+      @Override
       public byte[] run() throws KeeperException, InterruptedException {
         return zkClient.getData(path, watch, stat);
       }
@@ -910,6 +912,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   private Stat setDataWithRetries(final String path, final byte[] data,
       final int version) throws InterruptedException, KeeperException {
     return zkDoWithRetries(new ZKAction<Stat>() {
+      @Override
       public Stat run() throws KeeperException, InterruptedException {
         return zkClient.setData(path, data, version);
       }
@@ -919,6 +922,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   private void deleteWithRetries(final String path, final int version)
       throws KeeperException, InterruptedException {
     zkDoWithRetries(new ZKAction<Void>() {
+      @Override
       public Void run() throws KeeperException, InterruptedException {
         zkClient.delete(path, version);
         return null;
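
The anonymous ZKAction instances above let create, getData, setData, and delete share a single retry loop in zkDoWithRetries instead of duplicating the policy at each call site. A generic sketch of the wrapper, with hypothetical names:

interface Action<T> {
  T run() throws Exception;
}

class Retrier {
  // Assumes maxTries >= 1; the real code retries only transient ZK errors.
  static <T> T withRetries(Action<T> action, int maxTries) throws Exception {
    Exception last = null;
    for (int i = 0; i < maxTries; i++) {
      try {
        return action.run();
      } catch (Exception e) {
        last = e;
      }
    }
    throw last;
  }
}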

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java

@@ -56,6 +56,7 @@ public interface HAServiceProtocol {
       this.name = name;
     }
 
+    @Override
     public String toString() {
       return name;
     }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java

@@ -184,6 +184,7 @@ public class NodeFencer {
       this.arg = arg;
     }
     
+    @Override
     public String toString() {
       return method.getClass().getCanonicalName() + "(" + arg + ")";
     }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java

@@ -274,6 +274,7 @@ public class SshFenceByTcpPort extends Configured
     static final Log LOG = LogFactory.getLog(
         SshFenceByTcpPort.class.getName() + ".jsch");
 
+    @Override
     public boolean isEnabled(int level) {
       switch (level) {
       case com.jcraft.jsch.Logger.DEBUG:
@@ -291,6 +292,7 @@ public class SshFenceByTcpPort extends Configured
       }
     }
       
+    @Override
     public void log(int level, String message) {
       switch (level) {
       case com.jcraft.jsch.Logger.DEBUG:
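
The logger above is an adapter: jsch reports through its own Logger callback with integer levels, and each level is mapped onto the corresponding commons-logging call. A sketch of the mapping (level constants per jsch; the enum is hypothetical):

class JschLevelBridge {
  enum Level { DEBUG, INFO, WARN, ERROR }

  static Level map(int jschLevel) {
    switch (jschLevel) {
      case 0:  return Level.DEBUG; // com.jcraft.jsch.Logger.DEBUG
      case 1:  return Level.INFO;  // com.jcraft.jsch.Logger.INFO
      case 2:  return Level.WARN;  // com.jcraft.jsch.Logger.WARN
      default: return Level.ERROR; // ERROR and FATAL
    }
  }
}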

+ 4 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java

@@ -55,11 +55,10 @@ public class ZKFCRpcServer implements ZKFCProtocol {
         new ZKFCProtocolServerSideTranslatorPB(this);
     BlockingService service = ZKFCProtocolService
         .newReflectiveBlockingService(translator);
-    this.server = RPC.getServer(
-        ZKFCProtocolPB.class,
-        service, bindAddr.getHostName(),
-            bindAddr.getPort(), HANDLER_COUNT, false, conf,
-            null /*secretManager*/);
+    this.server = new RPC.Builder(conf).setProtocol(ZKFCProtocolPB.class)
+        .setInstance(service).setBindAddress(bindAddr.getHostName())
+        .setPort(bindAddr.getPort()).setNumHandlers(HANDLER_COUNT)
+        .setVerbose(false).build();
     
     // set service-level authorization security policy
     if (conf.getBoolean(

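Replacing the positional RPC.getServer(...) call with the fluent RPC.Builder names every parameter at the call site and lets optional ones — such as the secret manager previously passed as null — fall back to defaults. The general shape of such a builder, as a sketch rather than the actual org.apache.hadoop.ipc.RPC.Builder source (the defaults below are assumptions):

    // Fluent-builder sketch: required fields become setters, build() validates.
    class ServerBuilder {
      private Class<?> protocol;                 // required
      private Object instance;                   // required
      private String bindAddress = "0.0.0.0";    // assumed default
      private int port;
      private int numHandlers = 1;               // assumed default
      private boolean verbose;

      ServerBuilder setProtocol(Class<?> p)  { this.protocol = p; return this; }
      ServerBuilder setInstance(Object i)    { this.instance = i; return this; }
      ServerBuilder setBindAddress(String a) { this.bindAddress = a; return this; }
      ServerBuilder setPort(int p)           { this.port = p; return this; }
      ServerBuilder setNumHandlers(int n)    { this.numHandlers = n; return this; }
      ServerBuilder setVerbose(boolean v)    { this.verbose = v; return this; }

      Object build() {
        if (protocol == null || instance == null) {
          throw new IllegalStateException("protocol and instance are required");
        }
        return new Object();   // stand-in for the constructed server
      }
    }
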
+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

@@ -474,7 +474,7 @@ public class HttpServer implements FilterContainer {
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void addFilter(String name, String classname,
       Map<String, String> parameters) {
 
@@ -494,7 +494,7 @@ public class HttpServer implements FilterContainer {
     filterNames.add(name);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void addGlobalFilter(String name, String classname,
       Map<String, String> parameters) {
     final String[] ALL_URLS = { "/*" };

+ 4 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java

@@ -164,16 +164,18 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
   }
 
   /** @return the conf */
+  @Override
   public Configuration getConf() {
     return conf.get();
   }
 
   /** @param conf the conf to set */
+  @Override
   public void setConf(Configuration conf) {
     this.conf.set(conf);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     
     // First write out the size of the class table and any classes that are
@@ -187,7 +189,7 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
     }
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void readFields(DataInput in) throws IOException {
     
     // Get the number of "unknown" classes

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java

@@ -88,6 +88,7 @@ public class ArrayWritable implements Writable {
 
   public Writable[] get() { return values; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     values = new Writable[in.readInt()];          // construct values
     for (int i = 0; i < values.length; i++) {
@@ -97,6 +98,7 @@ public class ArrayWritable implements Writable {
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(values.length);                 // write values
     for (int i = 0; i < values.length; i++) {

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java

@@ -57,12 +57,14 @@ public class BooleanWritable implements WritableComparable<BooleanWritable> {
 
   /**
    */
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readBoolean();
   }
 
   /**
    */
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeBoolean(value);
   }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java

@@ -39,10 +39,12 @@ public class ByteWritable implements WritableComparable<ByteWritable> {
   /** Return the value of this ByteWritable. */
   public byte get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readByte();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeByte(value);
   }

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java

@@ -81,6 +81,7 @@ public class BytesWritable extends BinaryComparable
    * if you need the returned array to be precisely the length of the data.
    * @return The data is only valid between 0 and getLength() - 1.
    */
+  @Override
   public byte[] getBytes() {
     return bytes;
   }
@@ -97,6 +98,7 @@ public class BytesWritable extends BinaryComparable
   /**
    * Get the current size of the buffer.
    */
+  @Override
   public int getLength() {
     return size;
   }
@@ -171,6 +173,7 @@ public class BytesWritable extends BinaryComparable
   }
 
   // inherit javadoc
+  @Override
   public void readFields(DataInput in) throws IOException {
     setSize(0); // clear the old data
     setSize(in.readInt());
@@ -178,6 +181,7 @@ public class BytesWritable extends BinaryComparable
   }
   
   // inherit javadoc
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(size);
     out.write(bytes, 0, size);

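The write()/readFields() pair annotated above is the whole Writable serialization contract: write everything to a DataOutput, then read it back in the same order from a DataInput. A round-trip illustration using plain java.io streams:

    import java.io.*;
    import org.apache.hadoop.io.BytesWritable;

    // Round-tripping a Writable through in-memory streams (illustration only).
    public class RoundTrip {
      public static void main(String[] args) throws IOException {
        BytesWritable original = new BytesWritable(new byte[] {1, 2, 3});

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bos));   // writeInt(size) + raw bytes

        BytesWritable copy = new BytesWritable();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(bos.toByteArray())));

        assert copy.getLength() == 3;                // size restored on read
      }
    }
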
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java

@@ -45,6 +45,7 @@ public abstract class CompressedWritable implements Writable {
 
   public CompressedWritable() {}
 
+  @Override
   public final void readFields(DataInput in) throws IOException {
     compressed = new byte[in.readInt()];
     in.readFully(compressed, 0, compressed.length);
@@ -70,6 +71,7 @@ public abstract class CompressedWritable implements Writable {
   protected abstract void readFieldsCompressed(DataInput in)
     throws IOException;
 
+  @Override
   public final void write(DataOutput out) throws IOException {
     if (compressed == null) {
       ByteArrayOutputStream deflated = new ByteArrayOutputStream();

+ 0 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.io;
 import java.io.DataInputStream;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
 
 public class DataInputByteBuffer extends DataInputStream {
 

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java

@@ -72,6 +72,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     }
   }
 
+  @Override
   public T fromString(String str) throws IOException {
     try {
       byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
@@ -83,6 +84,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     }
   }
 
+  @Override
   public String toString(T obj) throws IOException {
     outBuf.reset();
     serializer.serialize(obj);
@@ -91,6 +93,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     return new String(Base64.encodeBase64(buf));
   }
 
+  @Override
   public void close() throws IOException {
     inBuf.close();
     outBuf.close();

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java

@@ -42,10 +42,12 @@ public class DoubleWritable implements WritableComparable<DoubleWritable> {
     set(value);
   }
   
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readDouble();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeDouble(value);
   }

+ 8 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java

@@ -23,7 +23,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Iterator;
-import java.util.Collection;
 import java.util.AbstractCollection;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -46,8 +45,11 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
   EnumSetWritable() {
   }
 
+  @Override
   public Iterator<E> iterator() { return value.iterator(); }
+  @Override
   public int size() { return value.size(); }
+  @Override
   public boolean add(E e) {
     if (value == null) {
       value = EnumSet.of(e);
@@ -109,7 +111,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     return value;
   }
 
-  /** {@inheritDoc} */
+  @Override
   @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     int length = in.readInt();
@@ -127,7 +129,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     if (this.value == null) {
       out.writeInt(-1);
@@ -152,6 +154,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    * Returns true if <code>o</code> is an EnumSetWritable with the same value,
    * or both are null.
    */
+  @Override
   public boolean equals(Object o) {
     if (o == null) {
       throw new IllegalArgumentException("null argument passed in equal().");
@@ -180,27 +183,25 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     return elementType;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     if (value == null)
       return 0;
     return (int) value.hashCode();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     if (value == null)
       return "(null)";
     return value.toString();
   }
 
-  /** {@inheritDoc} */
   @Override
   public Configuration getConf() {
     return this.conf;
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setConf(Configuration conf) {
     this.conf = conf;

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java

@@ -39,10 +39,12 @@ public class FloatWritable implements WritableComparable<FloatWritable> {
   /** Return the value of this FloatWritable. */
   public float get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readFloat();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeFloat(value);
   }

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java

@@ -114,11 +114,13 @@ public abstract class GenericWritable implements Writable, Configurable {
     return instance;
   }
   
+  @Override
   public String toString() {
     return "GW[" + (instance != null ? ("class=" + instance.getClass().getName() +
         ",value=" + instance.toString()) : "(null)") + "]";
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     type = in.readByte();
     Class<? extends Writable> clazz = getTypes()[type & 0xff];
@@ -131,6 +133,7 @@ public abstract class GenericWritable implements Writable, Configurable {
     instance.readFields(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     if (type == NOT_SET || instance == null)
       throw new IOException("The GenericWritable has NOT been set correctly. type="
@@ -145,10 +148,12 @@ public abstract class GenericWritable implements Writable, Configurable {
    */
   abstract protected Class<? extends Writable>[] getTypes();
 
+  @Override
   public Configuration getConf() {
     return conf;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java

@@ -272,9 +272,11 @@ public class IOUtils {
    * The /dev/null of OutputStreams.
    */
   public static class NullOutputStream extends OutputStream {
+    @Override
     public void write(byte[] b, int off, int len) throws IOException {
     }
 
+    @Override
     public void write(int b) throws IOException {
     }
   }  

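NullOutputStream discards everything written to it; one plausible use is measuring how long a Writable takes to serialize without paying for buffering or disk. A hypothetical sketch — timeWrite and its caller are not part of this commit:

    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.io.Writable;

    // Hypothetical /dev/null-stream use: time serialization, discard the bytes.
    class SerializationTimer {
      static long timeWrite(Writable w) throws IOException {
        DataOutputStream sink =
            new DataOutputStream(new IOUtils.NullOutputStream());
        long start = System.nanoTime();
        w.write(sink);                 // bytes are computed, then discarded
        return System.nanoTime() - start;
      }
    }
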
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java

@@ -42,10 +42,12 @@ public class IntWritable implements WritableComparable<IntWritable> {
   /** Return the value of this IntWritable. */
   public int get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readInt();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(value);
   }

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java

@@ -42,15 +42,18 @@ public class LongWritable implements WritableComparable<LongWritable> {
   /** Return the value of this LongWritable. */
   public long get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readLong();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(value);
   }
 
   /** Returns true iff <code>o</code> is a LongWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof LongWritable))
       return false;
@@ -58,17 +61,20 @@ public class LongWritable implements WritableComparable<LongWritable> {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return (int)value;
   }
 
   /** Compares two LongWritables. */
+  @Override
   public int compareTo(LongWritable o) {
     long thisValue = this.value;
     long thatValue = o.value;
     return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
   }
 
+  @Override
   public String toString() {
     return Long.toString(value);
   }
@@ -79,6 +85,7 @@ public class LongWritable implements WritableComparable<LongWritable> {
       super(LongWritable.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       long thisValue = readLong(b1, s1);
@@ -94,6 +101,7 @@ public class LongWritable implements WritableComparable<LongWritable> {
     public int compare(WritableComparable a, WritableComparable b) {
       return -super.compare(a, b);
     }
+    @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
       return -super.compare(b1, s1, l1, b2, s2, l2);
     }

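LongWritable.Comparator above compares keys directly in their serialized form, skipping deserialization entirely — a significant win on sort-heavy paths. The core trick, sketched in plain Java: DataOutput writes longs big-endian, so the value can be reassembled straight from the key buffer.

    // Byte-level comparison of serialized longs (sketch of the idea behind
    // LongWritable.Comparator, not the WritableComparator source).
    class RawLongCompare {
      static long readLongBE(byte[] b, int off) {
        long v = 0;
        for (int i = 0; i < 8; i++) {
          v = (v << 8) | (b[off + i] & 0xff);   // big-endian reassembly
        }
        return v;
      }

      static int compare(byte[] b1, int s1, byte[] b2, int s2) {
        long a = readLongBE(b1, s1);
        long c = readLongBE(b2, s2);
        return (a < c ? -1 : (a == c ? 0 : 1));  // same ternary as compareTo above
      }
    }
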
+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java

@@ -36,6 +36,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   public static final int MD5_LEN = 16;
 
   private static ThreadLocal<MessageDigest> DIGESTER_FACTORY = new ThreadLocal<MessageDigest>() {
+    @Override
     protected MessageDigest initialValue() {
       try {
         return MessageDigest.getInstance("MD5");
@@ -65,6 +66,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   }
   
   // javadoc from Writable
+  @Override
   public void readFields(DataInput in) throws IOException {
     in.readFully(digest);
   }
@@ -77,6 +79,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   }
 
   // javadoc from Writable
+  @Override
   public void write(DataOutput out) throws IOException {
     out.write(digest);
   }
@@ -155,6 +158,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
 
   /** Returns true iff <code>o</code> is an MD5Hash whose digest contains the
    * same values.  */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof MD5Hash))
       return false;
@@ -165,12 +169,14 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   /** Returns a hash code value for this object.
    * Only uses the first 4 bytes, since md5s are evenly distributed.
    */
+  @Override
   public int hashCode() {
     return quarterDigest();
   }
 
 
   /** Compares this object with the specified object for order.*/
+  @Override
   public int compareTo(MD5Hash that) {
     return WritableComparator.compareBytes(this.digest, 0, MD5_LEN,
                                            that.digest, 0, MD5_LEN);
@@ -182,6 +188,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
       super(MD5Hash.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       return compareBytes(b1, s1, MD5_LEN, b2, s2, MD5_LEN);
@@ -196,6 +203,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
 
   /** Returns a string representation of this object. */
+  @Override
   public String toString() {
     StringBuilder buf = new StringBuilder(MD5_LEN*2);
     for (int i = 0; i < MD5_LEN; i++) {

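MessageDigest instances are not thread-safe, so MD5Hash keeps one per thread through ThreadLocal.initialValue(), the hook the annotation above now guards. The pattern in isolation; since digest() resets the instance afterwards, each thread can reuse its copy indefinitely:

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // One MessageDigest per thread, as in DIGESTER_FACTORY above.
    class Md5PerThread {
      static final ThreadLocal<MessageDigest> DIGESTER =
          new ThreadLocal<MessageDigest>() {
            @Override
            protected MessageDigest initialValue() {
              try {
                return MessageDigest.getInstance("MD5");
              } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException(e);  // MD5 is a mandatory JDK algorithm
              }
            }
          };

      static byte[] md5(byte[] data) {
        return DIGESTER.get().digest(data);   // digest() resets, so reuse is safe
      }
    }
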
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java

@@ -296,6 +296,7 @@ public class MapFile {
     }
 
     /** Close the map. */
+    @Override
     public synchronized void close() throws IOException {
       data.close();
       index.close();
@@ -723,6 +724,7 @@ public class MapFile {
     }
 
     /** Close the map. */
+    @Override
     public synchronized void close() throws IOException {
       if (!indexClosed) {
         index.close();

+ 14 - 16
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java

@@ -55,27 +55,27 @@ public class MapWritable extends AbstractMapWritable
     copy(other);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void clear() {
     instance.clear();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsKey(Object key) {
     return instance.containsKey(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsValue(Object value) {
     return instance.containsValue(value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<Map.Entry<Writable, Writable>> entrySet() {
     return instance.entrySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean equals(Object obj) {
     if (this == obj) {
       return true;
@@ -93,27 +93,27 @@ public class MapWritable extends AbstractMapWritable
     return false;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable get(Object key) {
     return instance.get(key);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     return 1 + this.instance.hashCode();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean isEmpty() {
     return instance.isEmpty();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<Writable> keySet() {
     return instance.keySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
@@ -121,31 +121,30 @@ public class MapWritable extends AbstractMapWritable
     return instance.put(key, value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void putAll(Map<? extends Writable, ? extends Writable> t) {
     for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
       put(e.getKey(), e.getValue());
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable remove(Object key) {
     return instance.remove(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int size() {
     return instance.size();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Collection<Writable> values() {
     return instance.values();
   }
   
   // Writable
   
-  /** {@inheritDoc} */
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);
@@ -164,7 +163,6 @@ public class MapWritable extends AbstractMapWritable
     }
   }
 
-  /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {

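MapWritable implements java.util.Map by forwarding every call to an internal instance map, while put() also registers the key and value classes with AbstractMapWritable so deserialization can reinstantiate them. A simplified sketch of that delegate-and-record shape (the real class table and its wire format live in AbstractMapWritable):

    import java.util.*;
    import org.apache.hadoop.io.Writable;

    // Delegate-and-record sketch: forward to an inner map, remember the types.
    class RecordingMap {
      private final Map<Writable, Writable> instance =
          new HashMap<Writable, Writable>();
      private final Set<Class<?>> seenClasses = new HashSet<Class<?>>();

      public Writable put(Writable key, Writable value) {
        seenClasses.add(key.getClass());    // remembered so deserialization
        seenClasses.add(value.getClass());  // can reinstantiate the right types
        return instance.put(key, value);
      }

      public Writable get(Object key) { return instance.get(key); }
      public int size() { return instance.size(); }
    }
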
+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java

@@ -35,6 +35,7 @@ public class NullWritable implements WritableComparable<NullWritable> {
   /** Returns the single instance of this class. */
   public static NullWritable get() { return THIS; }
   
+  @Override
   public String toString() {
     return "(null)";
   }
@@ -46,8 +47,11 @@ public class NullWritable implements WritableComparable<NullWritable> {
   public int compareTo(NullWritable other) {
     return 0;
   }
+  @Override
   public boolean equals(Object other) { return other instanceof NullWritable; }
+  @Override
   public void readFields(DataInput in) throws IOException {}
+  @Override
   public void write(DataOutput out) throws IOException {}
 
   /** A Comparator &quot;optimized&quot; for NullWritable. */

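NullWritable's readFields() and write() above are deliberate no-ops: the singleton costs zero bytes on the wire, which makes it the idiomatic placeholder when only keys (or only values) carry data. An illustration:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.NullWritable;

    // NullWritable as a zero-cost placeholder.
    public class NullDemo {
      public static void main(String[] args) throws IOException {
        NullWritable nothing = NullWritable.get();   // always the same instance
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        nothing.write(new DataOutputStream(bos));    // write() is a no-op
        assert bos.size() == 0;                      // nothing hits the wire
      }
    }
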
+ 7 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java

@@ -66,15 +66,18 @@ public class ObjectWritable implements Writable, Configurable {
     this.instance = instance;
   }
   
+  @Override
   public String toString() {
     return "OW[class=" + declaredClass + ",value=" + instance + "]";
   }
 
   
+  @Override
   public void readFields(DataInput in) throws IOException {
     readObject(in, this, this.conf);
   }
   
+  @Override
   public void write(DataOutput out) throws IOException {
     writeObject(out, instance, declaredClass, conf);
   }
@@ -99,6 +102,7 @@ public class ObjectWritable implements Writable, Configurable {
       super(conf);
       this.declaredClass = declaredClass;
     }
+    @Override
     public void readFields(DataInput in) throws IOException {
       String className = UTF8.readString(in);
       declaredClass = PRIMITIVE_NAMES.get(className);
@@ -110,6 +114,7 @@ public class ObjectWritable implements Writable, Configurable {
         }
       }
     }
+    @Override
     public void write(DataOutput out) throws IOException {
       UTF8.writeString(out, declaredClass.getName());
     }
@@ -375,10 +380,12 @@ public class ObjectWritable implements Writable, Configurable {
     return declaredClass;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
 
+  @Override
   public Configuration getConf() {
     return this.conf;
   }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java

@@ -50,6 +50,7 @@ public class OutputBuffer extends FilterOutputStream {
   private static class Buffer extends ByteArrayOutputStream {
     public byte[] getData() { return buf; }
     public int getLength() { return count; }
+    @Override
     public void reset() { count = 0; }
 
     public void write(InputStream in, int len) throws IOException {

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java

@@ -194,6 +194,7 @@ public class ReadaheadPool {
       this.len = len;
     }
     
+    @Override
     public void run() {
       if (canceled) return;
       // There's a very narrow race here that the file will close right at

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java

@@ -24,7 +24,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;

+ 32 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java

@@ -625,15 +625,18 @@ public class SequenceFile {
       dataSize = length;
     }
     
+    @Override
     public int getSize() {
       return dataSize;
     }
     
+    @Override
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
       outStream.write(data, 0, dataSize);
     }
 
+    @Override
     public void writeCompressedBytes(DataOutputStream outStream) 
       throws IllegalArgumentException, IOException {
       throw 
@@ -666,10 +669,12 @@ public class SequenceFile {
       dataSize = length;
     }
     
+    @Override
     public int getSize() {
       return dataSize;
     }
     
+    @Override
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
       if (decompressedStream == null) {
@@ -687,6 +692,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
     public void writeCompressedBytes(DataOutputStream outStream) 
       throws IllegalArgumentException, IOException {
       outStream.write(data, 0, dataSize);
@@ -728,6 +734,7 @@ public class SequenceFile {
       return new TreeMap<Text, Text>(this.theMetadata);
     }
     
+    @Override
     public void write(DataOutput out) throws IOException {
       out.writeInt(this.theMetadata.size());
       Iterator<Map.Entry<Text, Text>> iter =
@@ -739,6 +746,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
     public void readFields(DataInput in) throws IOException {
       int sz = in.readInt();
       if (sz < 0) throw new IOException("Invalid size: " + sz + " for file metadata object");
@@ -752,6 +760,7 @@ public class SequenceFile {
       }    
     }
 
+    @Override
     public boolean equals(Object other) {
       if (other == null) {
         return false;
@@ -788,11 +797,13 @@ public class SequenceFile {
       return true;
     }
 
+    @Override
     public int hashCode() {
       assert false : "hashCode not designed";
       return 42; // any arbitrary constant will do 
     }
     
+    @Override
     public String toString() {
       StringBuilder sb = new StringBuilder();
       sb.append("size: ").append(this.theMetadata.size()).append("\n");
@@ -1250,6 +1261,7 @@ public class SequenceFile {
     Configuration getConf() { return conf; }
     
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       keySerializer.close();
       uncompressedValSerializer.close();
@@ -1360,6 +1372,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1392,6 +1405,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     public synchronized void appendRaw(byte[] keyData, int keyOffset,
         int keyLength, ValueBytes val) throws IOException {
 
@@ -1449,6 +1463,7 @@ public class SequenceFile {
     }
     
     /** Compress and flush contents to dfs */
+    @Override
     public synchronized void sync() throws IOException {
       if (noBufferedRecords > 0) {
         super.sync();
@@ -1478,6 +1493,7 @@ public class SequenceFile {
     }
     
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       if (out != null) {
         sync();
@@ -1486,6 +1502,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1518,6 +1535,7 @@ public class SequenceFile {
     }
     
     /** Append a key/value pair. */
+    @Override
     public synchronized void appendRaw(byte[] keyData, int keyOffset,
         int keyLength, ValueBytes val) throws IOException {
       
@@ -1960,6 +1978,7 @@ public class SequenceFile {
     }
     
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       // Return the decompressors to the pool
       CodecPool.returnDecompressor(keyLenDecompressor);
@@ -2618,6 +2637,7 @@ public class SequenceFile {
     }
 
     /** Returns the name of the file. */
+    @Override
     public String toString() {
       return filename;
     }
@@ -2948,6 +2968,7 @@ public class SequenceFile {
         mergeSort.mergeSort(pointersCopy, pointers, 0, count);
       }
       class SeqFileComparator implements Comparator<IntWritable> {
+        @Override
         public int compare(IntWritable I, IntWritable J) {
           return comparator.compare(rawBuffer, keyOffsets[I.get()], 
                                     keyLengths[I.get()], rawBuffer, 
@@ -3221,6 +3242,7 @@ public class SequenceFile {
         this.tmpDir = tmpDir;
         this.progress = progress;
       }
+      @Override
       protected boolean lessThan(Object a, Object b) {
         // indicate we're making progress
         if (progress != null) {
@@ -3232,6 +3254,7 @@ public class SequenceFile {
                                   msa.getKey().getLength(), msb.getKey().getData(), 0, 
                                   msb.getKey().getLength()) < 0;
       }
+      @Override
       public void close() throws IOException {
         SegmentDescriptor ms;                           // close inputs
         while ((ms = (SegmentDescriptor)pop()) != null) {
@@ -3239,12 +3262,15 @@ public class SequenceFile {
         }
         minSegment = null;
       }
+      @Override
       public DataOutputBuffer getKey() throws IOException {
         return rawKey;
       }
+      @Override
       public ValueBytes getValue() throws IOException {
         return rawValue;
       }
+      @Override
       public boolean next() throws IOException {
         if (size() == 0)
           return false;
@@ -3272,6 +3298,7 @@ public class SequenceFile {
         return true;
       }
       
+      @Override
       public Progress getProgress() {
         return mergeProgress; 
       }
@@ -3469,6 +3496,7 @@ public class SequenceFile {
         return preserveInput;
       }
       
+      @Override
       public int compareTo(Object o) {
         SegmentDescriptor that = (SegmentDescriptor)o;
         if (this.segmentLength != that.segmentLength) {
@@ -3481,6 +3509,7 @@ public class SequenceFile {
           compareTo(that.segmentPathName.toString());
       }
 
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof SegmentDescriptor)) {
           return false;
@@ -3495,6 +3524,7 @@ public class SequenceFile {
         return false;
       }
 
+      @Override
       public int hashCode() {
         return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
       }
@@ -3584,12 +3614,14 @@ public class SequenceFile {
       /** The default cleanup. Subclasses can override this with a custom 
        * cleanup 
        */
+      @Override
       public void cleanup() throws IOException {
         super.close();
         if (super.shouldPreserveInput()) return;
         parentContainer.cleanup();
       }
       
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof LinkedSegmentsDescriptor)) {
           return false;

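The MergeQueue changes above annotate the hooks of a k-way merge: the priority queue is ordered by each segment's current key (lessThan), and next() pops the smallest segment, advances it, and re-heapifies. The same idea with java.util.PriorityQueue, minus Hadoop's raw-byte comparators and progress reporting:

    import java.util.*;

    // K-way merge of sorted runs; a sketch of MergeQueue's control flow.
    class KWayMerge {
      static final class Run {
        final Iterator<Integer> it;
        Integer head;
        Run(Iterator<Integer> it) { this.it = it; this.head = it.next(); }
      }

      static List<Integer> mergeSorted(List<List<Integer>> runs) {
        PriorityQueue<Run> heap = new PriorityQueue<Run>(
            Math.max(1, runs.size()), new Comparator<Run>() {
              @Override
              public int compare(Run a, Run b) { return a.head.compareTo(b.head); }
            });
        for (List<Integer> r : runs) {
          if (!r.isEmpty()) heap.add(new Run(r.iterator()));
        }
        List<Integer> out = new ArrayList<Integer>();
        while (!heap.isEmpty()) {
          Run smallest = heap.poll();            // run with the least current key
          out.add(smallest.head);
          if (smallest.it.hasNext()) {           // advance and re-heapify,
            smallest.head = smallest.it.next();  // like MergeQueue.next()
            heap.add(smallest);
          }
        }
        return out;
      }
    }
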
+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java

@@ -87,6 +87,7 @@ public class SetFile extends MapFile {
     }
 
     // javadoc inherited
+    @Override
     public boolean seek(WritableComparable key)
       throws IOException {
       return super.seek(key);

+ 18 - 20
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java

@@ -57,86 +57,86 @@ public class SortedMapWritable extends AbstractMapWritable
     copy(other);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Comparator<? super WritableComparable> comparator() {
     // Returning null means we use the natural ordering of the keys
     return null;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public WritableComparable firstKey() {
     return instance.firstKey();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   headMap(WritableComparable toKey) {
     
     return instance.headMap(toKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public WritableComparable lastKey() {
     return instance.lastKey();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   subMap(WritableComparable fromKey, WritableComparable toKey) {
     
     return instance.subMap(fromKey, toKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   tailMap(WritableComparable fromKey) {
     
     return instance.tailMap(fromKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void clear() {
     instance.clear();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsKey(Object key) {
     return instance.containsKey(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsValue(Object value) {
     return instance.containsValue(value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<java.util.Map.Entry<WritableComparable, Writable>> entrySet() {
     return instance.entrySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable get(Object key) {
     return instance.get(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean isEmpty() {
     return instance.isEmpty();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<WritableComparable> keySet() {
     return instance.keySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable put(WritableComparable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
     return instance.put(key, value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void putAll(Map<? extends WritableComparable, ? extends Writable> t) {
     for (Map.Entry<? extends WritableComparable, ? extends Writable> e:
       t.entrySet()) {
@@ -145,22 +145,21 @@ public class SortedMapWritable extends AbstractMapWritable
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable remove(Object key) {
     return instance.remove(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int size() {
     return instance.size();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Collection<Writable> values() {
     return instance.values();
   }
 
-  /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
@@ -187,7 +186,6 @@ public class SortedMapWritable extends AbstractMapWritable
     }
   }
 
-  /** {@inheritDoc} */
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java

@@ -54,6 +54,7 @@ public interface Stringifier<T> extends java.io.Closeable {
    * Closes this object. 
    * @throws IOException if an I/O error occurs 
    * */
+  @Override
   public void close() throws IOException;
   
 }

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java

@@ -55,6 +55,7 @@ public class Text extends BinaryComparable
   
   private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
     new ThreadLocal<CharsetEncoder>() {
+      @Override
       protected CharsetEncoder initialValue() {
         return Charset.forName("UTF-8").newEncoder().
                onMalformedInput(CodingErrorAction.REPORT).
@@ -64,6 +65,7 @@ public class Text extends BinaryComparable
   
   private static ThreadLocal<CharsetDecoder> DECODER_FACTORY =
     new ThreadLocal<CharsetDecoder>() {
+    @Override
     protected CharsetDecoder initialValue() {
       return Charset.forName("UTF-8").newDecoder().
              onMalformedInput(CodingErrorAction.REPORT).
@@ -112,11 +114,13 @@ public class Text extends BinaryComparable
    * valid. Please use {@link #copyBytes()} if you
    * need the returned array to be precisely the length of the data.
    */
+  @Override
   public byte[] getBytes() {
     return bytes;
   }
 
   /** Returns the number of bytes in the byte array */ 
+  @Override
   public int getLength() {
     return length;
   }
@@ -281,6 +285,7 @@ public class Text extends BinaryComparable
   
   /** deserialize 
    */
+  @Override
   public void readFields(DataInput in) throws IOException {
     int newLength = WritableUtils.readVInt(in);
     setCapacity(newLength, false);
@@ -313,6 +318,7 @@ public class Text extends BinaryComparable
    * length uses zero-compressed encoding
    * @see Writable#write(DataOutput)
    */
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVInt(out, length);
     out.write(bytes, 0, length);
@@ -329,6 +335,7 @@ public class Text extends BinaryComparable
   }
 
   /** Returns true iff <code>o</code> is a Text with the same contents.  */
+  @Override
   public boolean equals(Object o) {
     if (o instanceof Text)
       return super.equals(o);
@@ -346,6 +353,7 @@ public class Text extends BinaryComparable
       super(Text.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       int n1 = WritableUtils.decodeVIntSize(b1[s1]);

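Text's per-thread CharsetEncoder is configured with CodingErrorAction.REPORT, so malformed input raises CharacterCodingException instead of being silently replaced. A standalone sketch of that policy; configuring the unmappable-character action the same way is an assumption here, since the hunk only shows the malformed-input setting:

    import java.nio.ByteBuffer;
    import java.nio.CharBuffer;
    import java.nio.charset.*;

    // Strict UTF-8 encoding: bad input throws instead of becoming '?'.
    class StrictUtf8 {
      static ByteBuffer encode(String s) throws CharacterCodingException {
        CharsetEncoder enc = Charset.forName("UTF-8").newEncoder()
            .onMalformedInput(CodingErrorAction.REPORT)
            .onUnmappableCharacter(CodingErrorAction.REPORT);
        return enc.encode(CharBuffer.wrap(s));
      }
    }
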
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java

@@ -57,6 +57,7 @@ public class TwoDArrayWritable implements Writable {
 
   public Writable[][] get() { return values; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     // construct matrix
     values = new Writable[in.readInt()][];          
@@ -81,6 +82,7 @@ public class TwoDArrayWritable implements Writable {
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(values.length);                 // write values
     for (int i = 0; i < values.length; i++) {

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java

@@ -110,6 +110,7 @@ public class UTF8 implements WritableComparable<UTF8> {
     System.arraycopy(other.bytes, 0, bytes, 0, length);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     length = in.readUnsignedShort();
     if (bytes == null || bytes.length < length)
@@ -123,6 +124,7 @@ public class UTF8 implements WritableComparable<UTF8> {
     WritableUtils.skipFully(in, length);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeShort(length);
     out.write(bytes, 0, length);

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java

@@ -43,10 +43,12 @@ public class VIntWritable implements WritableComparable<VIntWritable> {
   /** Return the value of this VIntWritable. */
   public int get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = WritableUtils.readVInt(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVInt(out, value);
   }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java

@@ -43,10 +43,12 @@ public class VLongWritable implements WritableComparable<VLongWritable> {
   /** Return the value of this LongWritable. */
   public long get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = WritableUtils.readVLong(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVLong(out, value);
   }

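VIntWritable and VLongWritable defer to WritableUtils' variable-length encoding, so small magnitudes cost a single byte on the wire instead of a fixed four or eight. A round-trip illustration (the exact one-byte range is WritableUtils' own detail):

    import java.io.*;
    import org.apache.hadoop.io.WritableUtils;

    // Variable-length long round trip.
    public class VLongDemo {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        WritableUtils.writeVLong(new DataOutputStream(bos), 42L);

        long back = WritableUtils.readVLong(new DataInputStream(
            new ByteArrayInputStream(bos.toByteArray())));

        assert back == 42L;
        assert bos.size() == 1;   // small magnitudes encode in a single byte
      }
    }
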
Some files were not shown because too many files changed in this diff