
Merge trunk into QJM branch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-3077@1380990 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon committed 12 years ago
commit 99ec5bd8d3
100 changed files with 619 additions and 133 deletions
  1. BUILDING.txt (+22 -2)
  2. hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java (+1 -0)
  3. hadoop-common-project/hadoop-common/CHANGES.txt (+44 -0)
  4. hadoop-common-project/hadoop-common/src/JNIFlags.cmake (+50 -1)
  5. hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh (+8 -0)
  6. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (+4 -0)
  7. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java (+2 -0)
  8. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java (+0 -11)
  9. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java (+0 -1)
  10. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java (+5 -0)
  11. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java (+1 -0)
  12. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java (+7 -1)
  13. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java (+15 -3)
  14. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java (+9 -1)
  15. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java (+3 -3)
  16. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java (+1 -0)
  17. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java (+4 -0)
  18. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java (+7 -0)
  19. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java (+7 -0)
  20. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java (+6 -0)
  21. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java (+2 -0)
  22. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java (+2 -1)
  23. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (+26 -0)
  24. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java (+5 -0)
  25. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (+9 -3)
  26. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (+2 -0)
  27. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (+26 -12)
  28. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java (+0 -3)
  29. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java (+3 -0)
  30. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java (+1 -0)
  31. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java (+2 -0)
  32. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java (+0 -1)
  33. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java (+1 -0)
  34. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java (+2 -0)
  35. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (+26 -0)
  36. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java (+1 -0)
  37. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java (+3 -0)
  38. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java (+10 -10)
  39. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java (+0 -1)
  40. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (+4 -0)
  41. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (+30 -6)
  42. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java (+1 -0)
  43. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (+1 -0)
  44. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java (+9 -0)
  45. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java (+17 -0)
  46. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java (+10 -0)
  47. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java (+4 -5)
  48. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java (+8 -5)
  49. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java (+6 -3)
  50. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java (+14 -0)
  51. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java (+4 -0)
  52. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java (+2 -0)
  53. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java (+12 -0)
  54. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java (+3 -0)
  55. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java (+2 -0)
  56. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (+2 -0)
  57. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java (+1 -0)
  58. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (+1 -0)
  59. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (+1 -0)
  60. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java (+0 -4)
  61. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (+1 -0)
  62. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java (+2 -1)
  63. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java (+4 -0)
  64. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java (+1 -0)
  65. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java (+1 -0)
  66. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java (+2 -0)
  67. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java (+4 -5)
  68. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (+2 -2)
  69. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java (+4 -2)
  70. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java (+2 -0)
  71. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java (+2 -0)
  72. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java (+2 -0)
  73. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java (+4 -0)
  74. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java (+2 -0)
  75. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java (+0 -2)
  76. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java (+3 -0)
  77. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java (+2 -0)
  78. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java (+8 -7)
  79. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java (+2 -0)
  80. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java (+5 -0)
  81. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (+2 -0)
  82. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java (+2 -0)
  83. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java (+8 -0)
  84. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java (+8 -0)
  85. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java (+2 -0)
  86. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java (+14 -16)
  87. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java (+4 -0)
  88. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java (+7 -0)
  89. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java (+1 -0)
  90. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java (+1 -0)
  91. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (+0 -1)
  92. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (+32 -0)
  93. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java (+1 -0)
  94. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java (+18 -20)
  95. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java (+1 -0)
  96. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (+8 -0)
  97. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java (+2 -0)
  98. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java (+2 -0)
  99. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java (+2 -0)
  100. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java (+2 -0)

+ 22 - 2
BUILDING.txt

@@ -54,12 +54,32 @@ Maven build goals:
  Build options:

   * Use -Pnative to compile/bundle native code
-  * Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
-    Snappy JNI bindings and to bundle Snappy SO files
   * Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
   * Use -Psrc to create a project source TAR.GZ
   * Use -Dtar to create a TAR with the distribution (using -Pdist)

+ Snappy build options:
+
+   Snappy is a compression library that can be utilized by the native code.
+   It is currently an optional component, meaning that Hadoop can be built with
+   or without this dependency.
+
+  * Use -Drequire.snappy to fail the build if libsnappy.so is not found.
+    If this option is not specified and the snappy library is missing,
+    we silently build a version of libhadoop.so that cannot make use of snappy.
+    This option is recommended if you plan on making use of snappy and want
+    to get more repeatable builds.
+
+  * Use -Dsnappy.prefix to specify a nonstandard location for the libsnappy
+    header files and library files. You do not need this option if you have
+    installed snappy using a package manager.
+  * Use -Dsnappy.lib to specify a nonstandard location for the libsnappy library
+    files.  Similarly to snappy.prefix, you do not need this option if you have
+    installed snappy using a package manager.
+  * Use -Dbundle.snappy to copy the contents of the snappy.lib directory into
+    the final tar file. This option requires that -Dsnappy.lib is also given,
+    and it ignores the -Dsnappy.prefix option.
+
   Tests options:

  * Use -DskipTests to skip tests when running the following Maven goals:
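
A minimal sketch of how these new flags combine on the mvn command line (the /opt/snappy prefix is a hypothetical install location, not something this commit prescribes):

    # Fail the build unless libsnappy.so can be found
    $ mvn package -Pdist,native -DskipTests -Dtar -Drequire.snappy

    # Point the build at a snappy installed under /opt/snappy and bundle
    # its libraries into the distribution tar
    $ mvn package -Pdist,native -DskipTests -Dtar -Drequire.snappy \
        -Dsnappy.lib=/opt/snappy/lib -Dbundle.snappy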

+ 1 - 0
hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java

@@ -97,6 +97,7 @@ class RootDocProcessor {
       this.target = target;
     }

+    @Override
     public Object invoke(Object proxy, Method method, Object[] args)
 	throws Throwable {
       String methodName = method.getName();

+ 44 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -95,6 +95,14 @@ Trunk (unreleased changes)
     the message is printed and the stack trace is not printed to avoid chatter.
     (Brandon Li via Suresh)

+    HADOOP-8719. Workaround for kerberos-related log errors upon running any
+    hadoop command on OSX. (Jianbin Wei via harsh)
+
+    HADOOP-8619. WritableComparator must implement no-arg constructor.
+    (Chris Douglas via Suresh)
+
+    HADOOP-8736. Add Builder for building RPC server. (Brandon Li via Suresh)
+
   BUG FIXES

     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -191,6 +199,9 @@ Trunk (unreleased changes)
     HADOOP-8623. hadoop jar command should respect HADOOP_OPTS.
     (Steven Willis via suresh)

+    HADOOP-8684. Deadlock between WritableComparator and WritableComparable.
+    (Jing Zhao via suresh)
+
   OPTIMIZATIONS

     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -314,6 +325,9 @@ Branch-2 ( Unreleased changes )
     HADOOP-8075. Lower native-hadoop library log from info to debug.
     (Hızır Sefa İrken via eli)

+    HADOOP-8748. Refactor DFSClient retry utility methods to a new class
+    in org.apache.hadoop.io.retry.  (Arun C Murthy via szetszwo)
+
   BUG FIXES

     HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname
@@ -439,6 +453,18 @@ Branch-2 ( Unreleased changes )
     HADOOP-8031. Configuration class fails to find embedded .jar resources;
     should use URL.openStream() (genman via tucu)

+    HADOOP-8738. junit JAR is showing up in the distro (tucu)
+
+    HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, jni_md.h.
+    (Colin Patrick McCabe via eli)
+
+    HADOOP-8747. Syntax error on cmake version 2.6 patch 2 in JNIFlags.cmake. (cmccabe via tucu)
+
+    HADOOP-8722. Update BUILDING.txt with latest snappy info.
+    (Colin Patrick McCabe via eli)
+
+    HADOOP-8764. CMake: HADOOP-8737 broke ARM build. (Trevor Robinson via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS

     HADOOP-8220. ZKFailoverController doesn't handle failure to become active
@@ -841,6 +867,18 @@ Release 2.0.0-alpha - 05-23-2012
     HADOOP-8655. Fix TextInputFormat for large deliminators. (Gelesh via
     bobby)

+Release 0.23.4 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.3 - UNRELEASED

   INCOMPATIBLE CHANGES
@@ -977,6 +1015,12 @@ Release 0.23.3 - UNRELEASED

     HADOOP-8725. MR is broken when security is off (daryn via bobby)

+    HADOOP-8726. The Secrets in Credentials are not available to MR tasks
+    (daryn and Benoy Antony via bobby)
+
+    HADOOP-8727. Gracefully deprecate dfs.umaskmode in 2.x onwards (Harsh J
+    via bobby)
+
 Release 0.23.2 - UNRELEASED

   INCOMPATIBLE CHANGES

+ 50 - 1
hadoop-common-project/hadoop-common/src/JNIFlags.cmake

@@ -65,4 +65,53 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm" AND CMAKE_SYSTEM_NAME STREQUAL "Linux"
     endif (READELF MATCHES "NOTFOUND")
 endif (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm" AND CMAKE_SYSTEM_NAME STREQUAL "Linux")

-find_package(JNI REQUIRED)
+IF("${CMAKE_SYSTEM}" MATCHES "Linux")
+    #
+    # Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
+    # Since we were invoked from Maven, we know that the JAVA_HOME environment
+    # variable is valid.  So we ignore system paths here and just use JAVA_HOME.
+    #
+    FILE(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _JAVA_HOME)
+    IF(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
+        SET(_java_libarch "i386")
+    ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+        SET(_java_libarch "amd64")
+    ELSEIF (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
+        SET(_java_libarch "arm")
+    ELSE()
+        SET(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
+    ENDIF()
+    SET(_JDK_DIRS "${_JAVA_HOME}/jre/lib/${_java_libarch}/*"
+                  "${_JAVA_HOME}/jre/lib/${_java_libarch}"
+                  "${_JAVA_HOME}/jre/lib/*"
+                  "${_JAVA_HOME}/jre/lib"
+                  "${_JAVA_HOME}/lib/*"
+                  "${_JAVA_HOME}/lib"
+                  "${_JAVA_HOME}/include/*"
+                  "${_JAVA_HOME}/include"
+                  "${_JAVA_HOME}"
+    )
+    FIND_PATH(JAVA_INCLUDE_PATH
+        NAMES jni.h 
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    FIND_PATH(JAVA_INCLUDE_PATH2 
+        NAMES jni_md.h
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    SET(JNI_INCLUDE_DIRS ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
+    FIND_LIBRARY(JAVA_JVM_LIBRARY
+        NAMES jvm JavaVM
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    SET(JNI_LIBRARIES ${JAVA_JVM_LIBRARY})
+    MESSAGE("JAVA_HOME=${JAVA_HOME}, JAVA_JVM_LIBRARY=${JAVA_JVM_LIBRARY}")
+    MESSAGE("JAVA_INCLUDE_PATH=${JAVA_INCLUDE_PATH}, JAVA_INCLUDE_PATH2=${JAVA_INCLUDE_PATH2}")
+    IF(JAVA_JVM_LIBRARY AND JAVA_INCLUDE_PATH AND JAVA_INCLUDE_PATH2)
+        MESSAGE("Located all JNI components successfully.")
+    ELSE()
+        MESSAGE(FATAL_ERROR "Failed to find a viable JVM installation under JAVA_HOME.")
+    ENDIF()
+ELSE()
+    find_package(JNI REQUIRED)
+ENDIF()
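
A quick way to exercise this logic outside of Maven (hypothetical paths; point the source argument at the directory containing this CMake project):

    # JAVA_HOME must point at a real JDK: on Linux the branch above searches
    # only under it (NO_DEFAULT_PATH) for jni.h, jni_md.h and libjvm.so
    $ export JAVA_HOME=/usr/lib/jvm/java-6-openjdk-amd64
    $ cmake path/to/hadoop-common/src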

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh

@@ -47,6 +47,14 @@ done
 # Extra Java runtime options.  Empty by default.
 export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"

+MAC_OSX=false
+case "`uname`" in
+Darwin*) MAC_OSX=true;;
+esac
+if $MAC_OSX; then
+    export HADOOP_OPTS="$HADOOP_OPTS -Djava.security.krb5.realm= -Djava.security.krb5.kdc="
+fi
+
 # Command specific options appended to HADOOP_OPTS when specified
 export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
 export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
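
On a Mac, every hadoop command will now carry empty krb5 realm/KDC overrides (the HADOOP-8719 workaround). A rough sanity check, assuming hadoop-env.sh sources cleanly on its own; the output shown is what one would expect on a Darwin host, not a captured run:

    $ uname
    Darwin
    $ . ./hadoop-env.sh; echo "$HADOOP_OPTS"
    -Djava.net.preferIPv4Stack=true -Djava.security.krb5.realm= -Djava.security.krb5.kdc=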

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -1847,6 +1847,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    *
    * @return an iterator over the entries.
    */
+  @Override
   public Iterator<Map.Entry<String, String>> iterator() {
     // Get a copy of just the string to string pairs. After the old object
     // methods that allow non-strings to be put into configurations are removed,
@@ -2272,6 +2273,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }

   //@Override
+  @Override
   public void write(DataOutput out) throws IOException {
     Properties props = getProps();
     WritableUtils.writeVInt(out, props.size());
@@ -2322,6 +2324,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
                new String[]{CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY});
     Configuration.addDeprecation("fs.default.name", 
                new String[]{CommonConfigurationKeys.FS_DEFAULT_NAME_KEY});
+    Configuration.addDeprecation("dfs.umaskmode",
+        new String[]{CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY});
   }

   /**

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java

@@ -39,11 +39,13 @@ public class Configured implements Configurable {
   }

   // inherit javadoc
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }

   // inherit javadoc
+  @Override
   public Configuration getConf() {
     return conf;
   }

+ 0 - 11
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java

@@ -23,12 +23,10 @@ import org.apache.commons.logging.*;
 import org.apache.commons.lang.StringEscapeUtils;

 import java.util.Collection;
-import java.util.Map;
 import java.util.Enumeration;
 import java.io.IOException;
 import java.io.PrintWriter;

-import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -57,9 +55,6 @@ public class ReconfigurationServlet extends HttpServlet {
   public static final String CONF_SERVLET_RECONFIGURABLE_PREFIX =
     "conf.servlet.reconfigurable.";

-  /**
-   * {@inheritDoc}
-   */
   @Override
   public void init() throws ServletException {
     super.init();
@@ -202,9 +197,6 @@ public class ReconfigurationServlet extends HttpServlet {
     }
   }

-  /**
-   * {@inheritDoc}
-   */
   @Override
   protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     throws ServletException, IOException {
@@ -219,9 +211,6 @@ public class ReconfigurationServlet extends HttpServlet {
     printFooter(out);
   }

-  /**
-   * {@inheritDoc}
-   */
   @Override
   protected void doPost(HttpServletRequest req, HttpServletResponse resp)
     throws ServletException, IOException {

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java

@@ -47,7 +47,6 @@ import org.apache.hadoop.fs.InvalidPathException;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;

 /**

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java

@@ -45,22 +45,27 @@ public class AvroFSInput implements Closeable, SeekableInput {
     this.stream = fc.open(p);
   }

+  @Override
   public long length() {
     return len;
   }

+  @Override
   public int read(byte[] b, int off, int len) throws IOException {
     return stream.read(b, off, len);
   }

+  @Override
   public void seek(long p) throws IOException {
     stream.seek(p);
   }

+  @Override
   public long tell() throws IOException {
     return stream.getPos();
   }

+  @Override
   public void close() throws IOException {
     stream.close();
   }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java

@@ -204,6 +204,7 @@ public class BlockLocation {
     }
   }

+  @Override
   public String toString() {
     StringBuilder result = new StringBuilder();
     result.append(offset);

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.fs;

 import java.io.BufferedInputStream;
 import java.io.FileDescriptor;
-import java.io.FileInputStream;
 import java.io.IOException;

 import org.apache.hadoop.classification.InterfaceAudience;
@@ -50,10 +49,12 @@ implements Seekable, PositionedReadable, HasFileDescriptor {
     super(in, size);
   }

+  @Override
   public long getPos() throws IOException {
     return ((FSInputStream)in).getPos()-(count-pos);
   }

+  @Override
   public long skip(long n) throws IOException {
     if (n <= 0) {
       return 0;
@@ -63,6 +64,7 @@ implements Seekable, PositionedReadable, HasFileDescriptor {
     return n;
   }

+  @Override
   public void seek(long pos) throws IOException {
     if( pos<0 ) {
       return;
@@ -82,20 +84,24 @@ implements Seekable, PositionedReadable, HasFileDescriptor {
     ((FSInputStream)in).seek(pos);
   }

+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     pos = 0;
     count = 0;
     return ((FSInputStream)in).seekToNewSource(targetPos);
   }

+  @Override
   public int read(long position, byte[] buffer, int offset, int length) throws IOException {
     return ((FSInputStream)in).read(position, buffer, offset, length) ;
   }

+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
     ((FSInputStream)in).readFully(position, buffer, offset, length);
   }

+  @Override
   public void readFully(long position, byte[] buffer) throws IOException {
     ((FSInputStream)in).readFully(position, buffer);
   }

+ 15 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java

@@ -53,6 +53,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     super(fs);
   }

+  @Override
   public void setConf(Configuration conf) {
     super.setConf(conf);
     if (conf != null) {
@@ -64,6 +65,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   /**
    * Set whether to verify checksum.
    */
+  @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     this.verifyChecksum = verifyChecksum;
   }
@@ -74,6 +76,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   }

   /** get the raw file system */
+  @Override
   public FileSystem getRawFileSystem() {
     return fs;
   }
@@ -162,14 +165,17 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
       return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
     }

+    @Override
     protected long getChunkPosition( long dataPos ) {
       return dataPos/bytesPerSum*bytesPerSum;
     }

+    @Override
     public int available() throws IOException {
       return datas.available() + super.available();
     }

+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException {
       // parameter check
@@ -190,6 +196,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
       return nread;
     }

+    @Override
     public void close() throws IOException {
       datas.close();
       if( sums != null ) {
@@ -290,6 +297,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
      * @exception  IOException  if an I/O error occurs.
      *             ChecksumException if the chunk to skip to is corrupted
      */
+    @Override
     public synchronized long skip(long n) throws IOException {
       long curPos = getPos();
       long fileLength = getFileLength();
@@ -311,6 +319,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
      *             ChecksumException if the chunk to seek to is corrupted
      */

+    @Override
     public synchronized void seek(long pos) throws IOException {
       if(pos>getFileLength()) {
         throw new IOException("Cannot seek after EOF");
@@ -339,7 +348,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     return new FSDataBoundedInputStream(fs, f, in);
   }

-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");
@@ -398,6 +407,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
       sums.writeInt(bytesPerSum);
     }

+    @Override
     public void close() throws IOException {
       flushBuffer();
       sums.close();
@@ -412,7 +422,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     }
   }

-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -454,7 +463,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     return out;
   }

-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -472,6 +480,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     boolean value = fs.setReplication(src, replication);
     if (!value)
@@ -487,6 +496,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   /**
    * Rename files/dirs
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     if (fs.isDirectory(src)) {
       return fs.rename(src, dst);
@@ -516,6 +526,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Implement the delete(Path, boolean) in checksum
    * file system.
    */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException{
     FileStatus fstatus = null;
     try {
@@ -538,6 +549,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   }

   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(Path file) {
       return !isChecksumFile(file);
     }

+ 9 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java

@@ -32,7 +32,6 @@ import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.PureJavaCrc32;
-import org.apache.hadoop.util.StringUtils;

 /**
  * Abstract Checksumed Fs.
@@ -61,6 +60,7 @@ public abstract class ChecksumFs extends FilterFs {
   /**
    * Set whether to verify checksum.
    */
+  @Override
   public void setVerifyChecksum(boolean inVerifyChecksum) {
     this.verifyChecksum = inVerifyChecksum;
   }
@@ -152,14 +152,17 @@ public abstract class ChecksumFs extends FilterFs {
       return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
     }

+    @Override
     protected long getChunkPosition(long dataPos) {
       return dataPos/bytesPerSum*bytesPerSum;
     }

+    @Override
     public int available() throws IOException {
       return datas.available() + super.available();
     }

+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException, UnresolvedLinkException {
       // parameter check
@@ -180,6 +183,7 @@ public abstract class ChecksumFs extends FilterFs {
       return nread;
     }

+    @Override
     public void close() throws IOException {
       datas.close();
       if (sums != null) {
@@ -258,6 +262,7 @@ public abstract class ChecksumFs extends FilterFs {
      * @exception  IOException  if an I/O error occurs.
      *             ChecksumException if the chunk to skip to is corrupted
      */
+    @Override
     public synchronized long skip(long n) throws IOException { 
       final long curPos = getPos();
       final long fileLength = getFileLength();
@@ -279,6 +284,7 @@ public abstract class ChecksumFs extends FilterFs {
      *             ChecksumException if the chunk to seek to is corrupted
      */

+    @Override
     public synchronized void seek(long pos) throws IOException { 
       if (pos>getFileLength()) {
         throw new IOException("Cannot seek after EOF");
@@ -348,6 +354,7 @@ public abstract class ChecksumFs extends FilterFs {
       sums.writeInt(bytesPerSum);
     }

+    @Override
     public void close() throws IOException {
       flushBuffer();
       sums.close();
@@ -447,6 +454,7 @@ public abstract class ChecksumFs extends FilterFs {
    * Implement the delete(Path, boolean) in checksum
    * file system.
    */
+  @Override
   public boolean delete(Path f, boolean recursive) 
     throws IOException, UnresolvedLinkException {
     FileStatus fstatus = null;

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java

@@ -75,7 +75,7 @@ public class ContentSummary implements Writable{
   /** Returns (disk) space quota */
   public long getSpaceQuota() {return spaceQuota;}

-  /** {@inheritDoc} */
+  @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
     out.writeLong(length);
@@ -86,7 +86,7 @@ public class ContentSummary implements Writable{
     out.writeLong(spaceQuota);
   }

-  /** {@inheritDoc} */
+  @Override
   @InterfaceAudience.Private
   public void readFields(DataInput in) throws IOException {
     this.length = in.readLong();
@@ -131,7 +131,7 @@ public class ContentSummary implements Writable{
     return qOption ? QUOTA_HEADER : HEADER;
   }

-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     return toString(true);
   }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java

@@ -131,6 +131,7 @@ public class DF extends Shell {
     return mount;
   }

+  @Override
   public String toString() {
     return
       "df -k " + mount +"\n" +

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java

@@ -76,6 +76,7 @@ public class DU extends Shell {
    **/
   class DURefreshThread implements Runnable {

+    @Override
     public void run() {

       while(shouldRun) {
@@ -169,16 +170,19 @@ public class DU extends Shell {
     }
   }

+  @Override
   public String toString() {
     return
       "du -sk " + dirPath +"\n" +
       used + "\t" + dirPath;
   }

+  @Override
   protected String[] getExecString() {
     return new String[] {"du", "-sk", dirPath};
   }

+  @Override
   protected void parseExecResult(BufferedReader lines) throws IOException {
     String line = lines.readLine();
     if (line == null) {

+ 7 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java

@@ -44,6 +44,7 @@ public class FSDataInputStream extends DataInputStream
    *
    * @param desired offset to seek to
    */
+  @Override
   public synchronized void seek(long desired) throws IOException {
     ((Seekable)in).seek(desired);
   }
@@ -53,6 +54,7 @@ public class FSDataInputStream extends DataInputStream
    *
    * @return current position in the input stream
    */
+  @Override
   public long getPos() throws IOException {
     return ((Seekable)in).getPos();
   }
@@ -68,6 +70,7 @@ public class FSDataInputStream extends DataInputStream
    *         if there is no more data because the end of the stream has been
    *         reached
    */
+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     return ((PositionedReadable)in).read(position, buffer, offset, length);
@@ -85,6 +88,7 @@ public class FSDataInputStream extends DataInputStream
    *                      If an exception is thrown an undetermined number
    *                      of bytes in the buffer may have been written. 
    */
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     ((PositionedReadable)in).readFully(position, buffer, offset, length);
@@ -93,6 +97,7 @@ public class FSDataInputStream extends DataInputStream
   /**
    * See {@link #readFully(long, byte[], int, int)}.
    */
+  @Override
   public void readFully(long position, byte[] buffer)
     throws IOException {
     ((PositionedReadable)in).readFully(position, buffer, 0, buffer.length);
@@ -104,6 +109,7 @@ public class FSDataInputStream extends DataInputStream
    * @param  targetPos  position to seek to
    * @return true if a new source is found, false otherwise
    */
+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     return ((Seekable)in).seekToNewSource(targetPos); 
   }
@@ -118,6 +124,7 @@ public class FSDataInputStream extends DataInputStream
     return in;
   }

+  @Override
   public int read(ByteBuffer buf) throws IOException {
     if (in instanceof ByteBufferReadable) {
       return ((ByteBufferReadable)in).read(buf);

+ 7 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java

@@ -140,6 +140,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @exception  IOException  if an I/O error occurs.
    */

+  @Override
   public synchronized int read() throws IOException {
     if (pos >= count) {
       fill();
@@ -180,6 +181,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @exception  IOException  if an I/O error occurs.
    *             ChecksumException if any checksum error occurs
    */
+  @Override
   public synchronized int read(byte[] b, int off, int len) throws IOException {
     // parameter check
     if ((off | len | (off + len) | (b.length - (off + len))) < 0) {
@@ -367,6 +369,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @exception  IOException  if an I/O error occurs.
    *             ChecksumException if the chunk to skip to is corrupted
    */
+  @Override
   public synchronized long skip(long n) throws IOException {
     if (n <= 0) {
       return 0;
@@ -389,6 +392,7 @@ abstract public class FSInputChecker extends FSInputStream {
    *             ChecksumException if the chunk to seek to is corrupted
    */

+  @Override
   public synchronized void seek(long pos) throws IOException {
     if( pos<0 ) {
       return;
@@ -462,13 +466,16 @@ abstract public class FSInputChecker extends FSInputStream {
     this.pos = 0;
   }

+  @Override
   final public boolean markSupported() {
     return false;
   }

+  @Override
   final public void mark(int readlimit) {
   }

+  @Override
   final public void reset() throws IOException {
     throw new IOException("mark/reset not supported");
   }

+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java

@@ -36,19 +36,23 @@ public abstract class FSInputStream extends InputStream
    * The next read() will be from that location.  Can't
    * seek past the end of the file.
    */
+  @Override
   public abstract void seek(long pos) throws IOException;

   /**
    * Return the current offset from the start of the file
    */
+  @Override
   public abstract long getPos() throws IOException;

   /**
    * Seeks a different copy of the data.  Returns true if 
    * found a new source, false otherwise.
    */
+  @Override
   public abstract boolean seekToNewSource(long targetPos) throws IOException;

+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     synchronized (this) {
@@ -64,6 +68,7 @@ public abstract class FSInputStream extends InputStream
     }
   }

+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     int nread = 0;
@@ -76,6 +81,7 @@ public abstract class FSInputStream extends InputStream
     }
   }

+  @Override
   public void readFully(long position, byte[] buffer)
     throws IOException {
     readFully(position, buffer, 0, buffer.length);

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java

@@ -55,6 +55,7 @@ abstract public class FSOutputSummer extends OutputStream {
   throws IOException;

   /** Write one byte */
+  @Override
   public synchronized void write(int b) throws IOException {
     sum.update(b);
     buf[count++] = (byte)b;
@@ -81,6 +82,7 @@ abstract public class FSOutputSummer extends OutputStream {
    * @param      len   the number of bytes to write.
    * @exception  IOException  if an I/O error occurs.
    */
+  @Override
   public synchronized void write(byte b[], int off, int len)
   throws IOException {
     if (off < 0 || len < 0 || off > b.length - len) {

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java

@@ -37,6 +37,7 @@ public abstract class FileChecksum implements Writable {
   public abstract byte[] getBytes();
 
   /** Return true if both the algorithms and the values are the same. */
+  @Override
   public boolean equals(Object other) {
     if (other == this) {
       return true;
@@ -50,7 +51,7 @@ public abstract class FileChecksum implements Writable {
       && Arrays.equals(this.getBytes(), that.getBytes());
   }
   
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     return getAlgorithmName().hashCode() ^ Arrays.hashCode(getBytes());
   }
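
Reviewer note: the pair annotated above must stay consistent: equals and hashCode both derive from the same two fields, so equal checksums hash equally. A self-contained illustration of the same pairing (names are hypothetical, mirroring FileChecksum's XOR scheme):

import java.util.Arrays;

final class SimpleChecksum {
  private final String algorithm;
  private final byte[] bytes;

  SimpleChecksum(String algorithm, byte[] bytes) {
    this.algorithm = algorithm;
    this.bytes = bytes.clone();
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof SimpleChecksum)) {
      return false;
    }
    SimpleChecksum that = (SimpleChecksum) other;
    // Equal only if both the algorithm and the value match.
    return this.algorithm.equals(that.algorithm)
        && Arrays.equals(this.bytes, that.bytes);
  }

  @Override
  public int hashCode() {
    // Same fields as equals(): XOR of the two components' hashes.
    return algorithm.hashCode() ^ Arrays.hashCode(bytes);
  }
}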

+ 26 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java

@@ -190,6 +190,7 @@ public final class FileContext {
     new FileContextFinalizer();
   
   private static final PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(final Path file) {
       return true;
     }
@@ -318,6 +319,7 @@ public final class FileContext {
       throws UnsupportedFileSystemException, IOException {
     try {
       return user.doAs(new PrivilegedExceptionAction<AbstractFileSystem>() {
+        @Override
         public AbstractFileSystem run() throws UnsupportedFileSystemException {
           return AbstractFileSystem.get(uri, conf);
         }
@@ -660,6 +662,7 @@ public final class FileContext {
     final CreateOpts[] updatedOpts = 
                       CreateOpts.setOpt(CreateOpts.perms(permission), opts);
     return new FSLinkResolver<FSDataOutputStream>() {
+      @Override
       public FSDataOutputStream next(final AbstractFileSystem fs, final Path p) 
         throws IOException {
         return fs.create(p, createFlag, updatedOpts);
@@ -703,6 +706,7 @@ public final class FileContext {
     final FsPermission absFerms = (permission == null ? 
           FsPermission.getDefault() : permission).applyUMask(umask);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.mkdir(p, absFerms, createParent);
@@ -738,6 +742,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     Path absF = fixRelativePart(f);
     return new FSLinkResolver<Boolean>() {
+      @Override
       public Boolean next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return Boolean.valueOf(fs.delete(p, recursive));
@@ -766,6 +771,7 @@ public final class FileContext {
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FSDataInputStream>() {
+      @Override
       public FSDataInputStream next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.open(p);
@@ -796,6 +802,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FSDataInputStream>() {
+      @Override
       public FSDataInputStream next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.open(p, bufferSize);
@@ -826,6 +833,7 @@ public final class FileContext {
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<Boolean>() {
+      @Override
       public Boolean next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return Boolean.valueOf(fs.setReplication(p, replication));
@@ -894,6 +902,7 @@ public final class FileContext {
        */
       final Path source = resolveIntermediate(absSrc);    
       new FSLinkResolver<Void>() {
+        @Override
         public Void next(final AbstractFileSystem fs, final Path p) 
           throws IOException, UnresolvedLinkException {
           fs.rename(source, p, options);
@@ -925,6 +934,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.setPermission(p, permission);
@@ -967,6 +977,7 @@ public final class FileContext {
     }
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.setOwner(p, username, groupname);
@@ -1002,6 +1013,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.setTimes(p, mtime, atime);
@@ -1034,6 +1046,7 @@ public final class FileContext {
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileChecksum>() {
+      @Override
       public FileChecksum next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileChecksum(p);
@@ -1089,6 +1102,7 @@ public final class FileContext {
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileStatus(p);
@@ -1135,6 +1149,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         FileStatus fi = fs.getFileLinkStatus(p);
@@ -1165,6 +1180,7 @@ public final class FileContext {
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<Path>() {
+      @Override
       public Path next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         FileStatus fi = fs.getFileLinkStatus(p);
@@ -1208,6 +1224,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<BlockLocation[]>() {
+      @Override
       public BlockLocation[] next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileBlockLocations(p, start, len);
@@ -1246,6 +1263,7 @@ public final class FileContext {
     }
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FsStatus>() {
+      @Override
       public FsStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFsStatus(p);
@@ -1339,6 +1357,7 @@ public final class FileContext {
       IOException { 
     final Path nonRelLink = fixRelativePart(link);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         fs.createSymlink(target, p, createParent);
@@ -1373,6 +1392,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<RemoteIterator<FileStatus>>() {
+      @Override
       public RemoteIterator<FileStatus> next(
           final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
@@ -1432,6 +1452,7 @@ public final class FileContext {
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<RemoteIterator<LocatedFileStatus>>() {
+      @Override
       public RemoteIterator<LocatedFileStatus> next(
           final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
@@ -1703,6 +1724,7 @@ public final class FileContext {
         IOException {
       final Path absF = fixRelativePart(f);
       return new FSLinkResolver<FileStatus[]>() {
+        @Override
         public FileStatus[] next(final AbstractFileSystem fs, final Path p) 
           throws IOException, UnresolvedLinkException {
           return fs.listStatus(p);
@@ -2232,6 +2254,7 @@ public final class FileContext {
    * Deletes all the paths in deleteOnExit on JVM shutdown.
    */
   static class FileContextFinalizer implements Runnable {
+    @Override
     public synchronized void run() {
       processDeleteOnExit();
     }
@@ -2244,6 +2267,7 @@ public final class FileContext {
   protected Path resolve(final Path f) throws FileNotFoundException,
       UnresolvedLinkException, AccessControlException, IOException {
     return new FSLinkResolver<Path>() {
+      @Override
       public Path next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.resolvePath(p);
@@ -2259,6 +2283,7 @@ public final class FileContext {
    */
   protected Path resolveIntermediate(final Path f) throws IOException {
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p) 
         throws IOException, UnresolvedLinkException {
         return fs.getFileLinkStatus(p);
@@ -2281,6 +2306,7 @@ public final class FileContext {
     final HashSet<AbstractFileSystem> result 
       = new HashSet<AbstractFileSystem>();
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
           throws IOException, UnresolvedLinkException {
         result.add(fs);
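
Reviewer note: every FileContext hunk above touches the same idiom: an anonymous FSLinkResolver subclass supplies the one operation to run at a resolved path, and the driver retries when a symlink is encountered. A generic, stand-alone sketch of that template (class names and the retry cap are hypothetical stand-ins, not Hadoop's classes):

abstract class LinkResolver<T> {
  // Subclasses implement the single operation to apply at the resolved path;
  // this is the method the hunks above now mark with @Override.
  abstract T next(String path) throws Exception;

  // Drives the operation, following the new target on each "link" error.
  T resolve(String path) throws Exception {
    for (int hops = 0; hops < 32; hops++) {   // cap to catch symlink cycles
      try {
        return next(path);
      } catch (LinkException e) {
        path = e.getTarget();                 // follow the link and retry
      }
    }
    throw new Exception("too many symlink hops: " + path);
  }
}

class LinkException extends Exception {
  private final String target;
  LinkException(String target) { this.target = target; }
  String getTarget() { return target; }
}

A caller then writes new LinkResolver<String>() { ... }.resolve("/a/b"), which is exactly the shape of the anonymous classes in the diff.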

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java

@@ -253,6 +253,7 @@ public class FileStatus implements Writable, Comparable {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     Text.writeString(out, getPath().toString(), Text.DEFAULT_MAX_LEN);
     out.writeLong(getLen());
@@ -270,6 +271,7 @@ public class FileStatus implements Writable, Comparable {
     }
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     String strPath = Text.readString(in, Text.DEFAULT_MAX_LEN);
     this.path = new Path(strPath);
@@ -299,6 +301,7 @@ public class FileStatus implements Writable, Comparable {
    * @throws ClassCastException if the specified object's is not of 
    *         type FileStatus
    */
+  @Override
   public int compareTo(Object o) {
     FileStatus other = (FileStatus)o;
     return this.getPath().compareTo(other.getPath());
@@ -308,6 +311,7 @@ public class FileStatus implements Writable, Comparable {
    * @param   o the object to be compared.
    * @return  true if two file status has the same path name; false if not.
    */
+  @Override
   public boolean equals(Object o) {
     if (o == null) {
       return false;
@@ -328,6 +332,7 @@ public class FileStatus implements Writable, Comparable {
    *
    * @return  a hash code value for the path name.
    */
+  @Override
   public int hashCode() {
     return getPath().hashCode();
   }
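
Reviewer note: the write/readFields pair annotated above is the Writable contract: readFields must consume fields in exactly the order write produced them. A minimal, Hadoop-free round-trip sketch of that contract (class and field names are hypothetical):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

class StatusRecord {
  String path;
  long length;

  void write(DataOutput out) throws IOException {
    out.writeUTF(path);
    out.writeLong(length);
  }

  void readFields(DataInput in) throws IOException {
    path = in.readUTF();     // must read in exactly the order written
    length = in.readLong();
  }

  public static void main(String[] args) throws IOException {
    StatusRecord a = new StatusRecord();
    a.path = "/tmp/example";
    a.length = 42L;

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    a.write(new DataOutputStream(bytes));

    StatusRecord b = new StatusRecord();
    b.readFields(new DataInputStream(
        new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(b.path + " " + b.length);   // /tmp/example 42
  }
}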

+ 9 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

@@ -147,6 +147,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     UserGroupInformation ugi =
         UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override
       public FileSystem run() throws IOException {
         return get(uri, conf);
       }
@@ -332,6 +333,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     UserGroupInformation ugi =
         UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override
       public FileSystem run() throws IOException {
         return newInstance(uri,conf); 
       }
@@ -1389,6 +1391,7 @@ public abstract class FileSystem extends Configured implements Closeable {
   }
 
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
+      @Override
       public boolean accept(Path file) {
         return true;
       }     
@@ -2056,6 +2059,7 @@ public abstract class FileSystem extends Configured implements Closeable {
    * No more filesystem operations are needed.  Will
    * release any held locks.
    */
+  @Override
   public void close() throws IOException {
     // delete all files that were marked as delete-on-exit.
     processDeleteOnExit();
@@ -2393,6 +2397,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     }
 
     private class ClientFinalizer implements Runnable {
+      @Override
       public synchronized void run() {
         try {
           closeAll(true);
@@ -2447,7 +2452,7 @@ public abstract class FileSystem extends Configured implements Closeable {
         this.ugi = UserGroupInformation.getCurrentUser();
       }
 
-      /** {@inheritDoc} */
+      @Override
       public int hashCode() {
         return (scheme + authority).hashCode() + ugi.hashCode() + (int)unique;
       }
@@ -2456,7 +2461,7 @@ public abstract class FileSystem extends Configured implements Closeable {
         return a == b || (a != null && a.equals(b));        
       }
 
-      /** {@inheritDoc} */
+      @Override
       public boolean equals(Object obj) {
         if (obj == this) {
           return true;
@@ -2471,7 +2476,7 @@ public abstract class FileSystem extends Configured implements Closeable {
         return false;        
       }
 
-      /** {@inheritDoc} */
+      @Override
       public String toString() {
         return "("+ugi.toString() + ")@" + scheme + "://" + authority;        
       }
@@ -2584,6 +2589,7 @@ public abstract class FileSystem extends Configured implements Closeable {
       return writeOps.get();
     }
 
+    @Override
     public String toString() {
       return bytesRead + " bytes read, " + bytesWritten + " bytes written, "
           + readOps + " read ops, " + largeReadOps + " large read ops, "
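
Reviewer note: the first two FileSystem hunks annotate run() inside anonymous PrivilegedExceptionAction instances passed to ugi.doAs. A Hadoop-free sketch of that doAs idiom, with javax.security.auth.Subject standing in for UserGroupInformation (the Subject here is empty and purely illustrative):

import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import javax.security.auth.Subject;

class DoAsSketch {
  public static void main(String[] args) throws PrivilegedActionException {
    Subject user = new Subject();   // stand-in for the resolved UGI
    String result = Subject.doAs(user, new PrivilegedExceptionAction<String>() {
      @Override
      public String run() {
        // In FileSystem.get(uri, conf, user) this is where the filesystem
        // instance is actually created, under the caller's identity.
        return "created under the supplied identity";
      }
    });
    System.out.println(result);
  }
}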

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -414,9 +414,11 @@ public class FileUtil {
     String getResult() throws IOException {
       return result;
     }
+    @Override
     protected String[] getExecString() {
       return command;
     }
+    @Override
     protected void parseExecResult(BufferedReader lines) throws IOException {
       String line = lines.readLine();
       if (line == null) {

+ 26 - 12
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java

@@ -76,6 +76,7 @@ public class FilterFileSystem extends FileSystem {
    *   for this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
     // this is less than ideal, but existing filesystems sometimes neglect
@@ -90,6 +91,7 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
+  @Override
   public URI getUri() {
     return fs.getUri();
   }
@@ -104,6 +106,7 @@ public class FilterFileSystem extends FileSystem {
   }
   
   /** Make sure that a path specifies a FileSystem. */
+  @Override
   public Path makeQualified(Path path) {
     Path fqPath = fs.makeQualified(path);
     // swap in our scheme if the filtered fs is using a different scheme
@@ -125,10 +128,12 @@ public class FilterFileSystem extends FileSystem {
   ///////////////////////////////////////////////////////////////
 
   /** Check that a Path belongs to this FileSystem. */
+  @Override
   protected void checkPath(Path path) {
     fs.checkPath(path);
   }
 
+  @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
     long len) throws IOException {
       return fs.getFileBlockLocations(file, start, len);
@@ -143,17 +148,17 @@ public class FilterFileSystem extends FileSystem {
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
    */
+  @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     return fs.open(f, bufferSize);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     return fs.append(f, bufferSize, progress);
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -171,6 +176,7 @@ public class FilterFileSystem extends FileSystem {
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     return fs.setReplication(src, replication);
   }
@@ -179,23 +185,23 @@ public class FilterFileSystem extends FileSystem {
    * Renames Path src to Path dst.  Can take place on local fs
    * or remote DFS.
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     return fs.rename(src, dst);
   }
   
   /** Delete a file */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException {
     return fs.delete(f, recursive);
   }
   
   /** List files in a directory. */
+  @Override
   public FileStatus[] listStatus(Path f) throws IOException {
     return fs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -203,11 +209,13 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** List files and its block locations in a directory. */
+  @Override
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
   throws IOException {
     return fs.listLocatedStatus(f);
   }
   
+  @Override
   public Path getHomeDirectory() {
     return fs.getHomeDirectory();
   }
@@ -219,6 +227,7 @@ public class FilterFileSystem extends FileSystem {
    * 
    * @param newDir
    */
+  @Override
   public void setWorkingDirectory(Path newDir) {
     fs.setWorkingDirectory(newDir);
   }
@@ -228,21 +237,21 @@ public class FilterFileSystem extends FileSystem {
    * 
    * @return the directory pathname
    */
+  @Override
   public Path getWorkingDirectory() {
     return fs.getWorkingDirectory();
   }
   
+  @Override
   protected Path getInitialWorkingDirectory() {
     return fs.getInitialWorkingDirectory();
   }
   
-  /** {@inheritDoc} */
   @Override
   public FsStatus getStatus(Path p) throws IOException {
     return fs.getStatus(p);
   }
   
-  /** {@inheritDoc} */
   @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     return fs.mkdirs(f, permission);
@@ -254,6 +263,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyFromLocalFile(delSrc, src, dst);
@@ -264,6 +274,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite, 
                                 Path[] srcs, Path dst)
     throws IOException {
@@ -275,6 +286,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite, 
                                 Path src, Path dst)
     throws IOException {
@@ -286,6 +298,7 @@ public class FilterFileSystem extends FileSystem {
    * Copy it from FS control to the local dst name.
    * delSrc indicates if the src will be removed or not.
    */   
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyToLocalFile(delSrc, src, dst);
@@ -297,6 +310,7 @@ public class FilterFileSystem extends FileSystem {
    * file.  If the FS is local, we write directly into the target.  If
    * the FS is remote, we write into the tmp local area.
    */
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     return fs.startLocalOutput(fsOutputFile, tmpLocalFile);
@@ -308,12 +322,14 @@ public class FilterFileSystem extends FileSystem {
    * FS will copy the contents of tmpLocalFile to the correct target at
    * fsOutputFile.
    */
+  @Override
   public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     fs.completeLocalOutput(fsOutputFile, tmpLocalFile);
   }
 
   /** Return the total size of all files in the filesystem.*/
+  @Override
   public long getUsed() throws IOException{
     return fs.getUsed();
   }
@@ -357,16 +373,17 @@ public class FilterFileSystem extends FileSystem {
   /**
    * Get file status.
    */
+  @Override
   public FileStatus getFileStatus(Path f) throws IOException {
     return fs.getFileStatus(f);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FileChecksum getFileChecksum(Path f) throws IOException {
     return fs.getFileChecksum(f);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     fs.setVerifyChecksum(verifyChecksum);
   }
@@ -387,21 +404,18 @@ public class FilterFileSystem extends FileSystem {
     fs.close();
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setOwner(Path p, String username, String groupname
       ) throws IOException {
     fs.setOwner(p, username, groupname);
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setTimes(Path p, long mtime, long atime
       ) throws IOException {
     fs.setTimes(p, mtime, atime);
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setPermission(Path p, FsPermission permission
       ) throws IOException {
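
Reviewer note: FilterFileSystem is a plain decorator, and every method in the hunks above is exactly the kind of forwarding override the annotation documents. A minimal stand-alone sketch of the same shape (interface and class names are hypothetical):

interface Store {
  String read(String key) throws Exception;
  void write(String key, String value) throws Exception;
}

// The filter/decorator: forwards everything to the wrapped instance,
// analogous to FilterFileSystem delegating to its 'fs' field.
class FilterStore implements Store {
  protected final Store delegate;

  FilterStore(Store delegate) {
    this.delegate = delegate;
  }

  @Override
  public String read(String key) throws Exception {
    return delegate.read(key);      // pure forwarding, like fs.open(f, ...)
  }

  @Override
  public void write(String key, String value) throws Exception {
    delegate.write(key, value);
  }
}

// A subclass then overrides only what it wants to change:
class LoggingStore extends FilterStore {
  LoggingStore(Store delegate) { super(delegate); }

  @Override
  public void write(String key, String value) throws Exception {
    System.out.println("write " + key);
    super.write(key, value);
  }
}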

+ 0 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java

@@ -174,9 +174,6 @@ public abstract class FilterFs extends AbstractFileSystem {
     return myFs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java

@@ -39,6 +39,7 @@ public class FsServerDefaults implements Writable {
 
   static { // register a ctor
     WritableFactories.setFactory(FsServerDefaults.class, new WritableFactory() {
+      @Override
       public Writable newInstance() {
         return new FsServerDefaults();
       }
@@ -106,6 +107,7 @@ public class FsServerDefaults implements Writable {
   // /////////////////////////////////////////
   // Writable
   // /////////////////////////////////////////
+  @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
     out.writeLong(blockSize);
@@ -116,6 +118,7 @@ public class FsServerDefaults implements Writable {
     WritableUtils.writeEnum(out, checksumType);
   }
 
+  @Override
   @InterfaceAudience.Private
   public void readFields(DataInput in) throws IOException {
     blockSize = in.readLong();
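
Reviewer note: the static block in the first hunk registers a zero-argument factory so a deserializer can instantiate the class without reflection. A stand-alone sketch of that registry idiom, in the same anonymous-class style the diff uses (all names hypothetical):

import java.util.HashMap;
import java.util.Map;

class Factories {
  interface Factory {
    Object newInstance();
  }

  private static final Map<Class<?>, Factory> FACTORIES =
      new HashMap<Class<?>, Factory>();

  static synchronized void setFactory(Class<?> c, Factory f) {
    FACTORIES.put(c, f);
  }

  static synchronized Object newInstance(Class<?> c) {
    Factory f = FACTORIES.get(c);
    return f == null ? null : f.newInstance();
  }
}

class Defaults {
  static { // register a ctor, as FsServerDefaults does
    Factories.setFactory(Defaults.class, new Factories.Factory() {
      @Override
      public Object newInstance() {
        return new Defaults();
      }
    });
  }
}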

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java

@@ -236,6 +236,7 @@ public class FsShell extends Configured implements Tool {
   /**
    * run
    */
+  @Override
   public int run(String argv[]) throws Exception {
     // initialize FsShell
     init();

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java

@@ -60,12 +60,14 @@ public class FsStatus implements Writable {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(capacity);
     out.writeLong(used);
     out.writeLong(remaining);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     capacity = in.readLong();
     used = in.readLong();

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java

@@ -53,7 +53,6 @@ class FsUrlConnection extends URLConnection {
     }
   }
 
-  /* @inheritDoc */
   @Override
   public InputStream getInputStream() throws IOException {
     if (is == null) {

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java

@@ -59,6 +59,7 @@ public class FsUrlStreamHandlerFactory implements
     this.handler = new FsUrlStreamHandler(this.conf);
   }
 
+  @Override
   public java.net.URLStreamHandler createURLStreamHandler(String protocol) {
     if (!protocols.containsKey(protocol)) {
       boolean known = true;
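
Reviewer note: createURLStreamHandler is the single method of java.net.URLStreamHandlerFactory, and the JVM accepts exactly one process-wide factory. A stand-alone sketch of how such a factory is wired in (this one handles nothing; returning null defers to the built-in handlers):

import java.net.URL;
import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;

class NullHandlerFactory implements URLStreamHandlerFactory {
  @Override
  public URLStreamHandler createURLStreamHandler(String protocol) {
    // FsUrlStreamHandlerFactory returns its filesystem-aware handler for
    // known schemes here; null means "not mine, use the default".
    return null;
  }

  public static void main(String[] args) {
    URL.setURLStreamHandlerFactory(new NullHandlerFactory()); // once per JVM
  }
}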

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Evolving
 public class GlobFilter implements PathFilter {
   private final static PathFilter DEFAULT_FILTER = new PathFilter() {
+      @Override
       public boolean accept(Path file) {
         return true;
       }
@@ -75,6 +76,7 @@ public class GlobFilter implements PathFilter {
     return pattern.hasWildcard();
   }
 
+  @Override
   public boolean accept(Path path) {
     return pattern.matches(path.getName()) && userFilter.accept(path);
   }
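
Reviewer note: GlobFilter's accept(path) is just "glob matches AND user filter accepts", with an accept-all default filter. A stand-alone sketch of that composition, with java.util.regex.Pattern standing in for Hadoop's glob pattern (interface and names hypothetical):

import java.util.regex.Pattern;

class ComposedFilter {
  interface NameFilter {
    boolean accept(String name);
  }

  static final NameFilter ACCEPT_ALL = new NameFilter() {
    @Override
    public boolean accept(String name) {
      return true;   // the DEFAULT_FILTER role from the hunk above
    }
  };

  static NameFilter glob(final Pattern pattern, final NameFilter userFilter) {
    return new NameFilter() {
      @Override
      public boolean accept(String name) {
        // Both conditions must hold, as in GlobFilter.accept().
        return pattern.matcher(name).matches() && userFilter.accept(name);
      }
    };
  }

  public static void main(String[] args) {
    NameFilter f = glob(Pattern.compile(".*\\.log"), ACCEPT_ALL);
    System.out.println(f.accept("server.log"));   // true
    System.out.println(f.accept("server.txt"));   // false
  }
}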

+ 26 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

@@ -106,6 +106,7 @@ public class HarFileSystem extends FilterFileSystem {
    * har:///archivepath. This assumes the underlying filesystem
    * to be used in case not specified.
    */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     // decode the name
     URI underLyingURI = decodeHarURI(name, conf);
@@ -247,6 +248,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * return the top level archive.
    */
+  @Override
   public Path getWorkingDirectory() {
     return new Path(uri.toString());
   }
@@ -636,6 +638,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * @return null since no checksum algorithm is implemented.
    */
+  @Override
   public FileChecksum getFileChecksum(Path f) {
     return null;
   }
@@ -668,6 +671,7 @@ public class HarFileSystem extends FilterFileSystem {
     throw new IOException("Har: Create not allowed");
   }
   
+  @Override
   public FSDataOutputStream create(Path f,
       FsPermission permission,
       boolean overwrite,
@@ -735,10 +739,12 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * return the top level archive path.
    */
+  @Override
   public Path getHomeDirectory() {
     return new Path(uri.toString());
   }
   
+  @Override
   public void setWorkingDirectory(Path newDir) {
     //does nothing.
   }
@@ -746,6 +752,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     throw new IOException("Har: mkdirs not allowed");
   }
@@ -753,6 +760,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws 
         IOException {
     throw new IOException("Har: copyfromlocalfile not allowed");
@@ -761,6 +769,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * copies the file in the har filesystem to a local file.
    */
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst) 
     throws IOException {
     FileUtil.copy(this, src, getLocal(getConf()), dst, false, getConf());
@@ -769,6 +778,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) 
     throws IOException {
     throw new IOException("Har: startLocalOutput not allowed");
@@ -777,6 +787,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile) 
     throws IOException {
     throw new IOException("Har: completeLocalOutput not allowed");
@@ -785,6 +796,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * not implemented.
    */
+  @Override
   public void setOwner(Path p, String username, String groupname)
     throws IOException {
     throw new IOException("Har: setowner not allowed");
@@ -793,6 +805,7 @@ public class HarFileSystem extends FilterFileSystem {
   /**
    * Not implemented.
    */
+  @Override
   public void setPermission(Path p, FsPermission permisssion) 
     throws IOException {
     throw new IOException("Har: setPermission not allowed");
@@ -825,6 +838,7 @@ public class HarFileSystem extends FilterFileSystem {
         this.end = start + length;
       }
       
+      @Override
       public synchronized int available() throws IOException {
         long remaining = end - underLyingStream.getPos();
         if (remaining > (long)Integer.MAX_VALUE) {
@@ -833,6 +847,7 @@ public class HarFileSystem extends FilterFileSystem {
         return (int) remaining;
       }
       
+      @Override
       public synchronized  void close() throws IOException {
         underLyingStream.close();
         super.close();
@@ -847,15 +862,18 @@ public class HarFileSystem extends FilterFileSystem {
       /**
        * reset is not implemented
        */
+      @Override
       public void reset() throws IOException {
         throw new IOException("reset not implemented.");
       }
       
+      @Override
       public synchronized int read() throws IOException {
        int ret = read(oneBytebuff, 0, 1);
        return (ret <= 0) ? -1: (oneBytebuff[0] & 0xff);
       }
       
+      @Override
       public synchronized int read(byte[] b) throws IOException {
        int ret = read(b, 0, b.length);
        if (ret != -1) {
@@ -867,6 +885,7 @@ public class HarFileSystem extends FilterFileSystem {
       /**
        * 
        */
+      @Override
       public synchronized int read(byte[] b, int offset, int len) 
         throws IOException {
        int newlen = len;
@@ -882,6 +901,7 @@ public class HarFileSystem extends FilterFileSystem {
        return ret;
       }
       
+      @Override
       public synchronized long skip(long n) throws IOException {
        long tmpN = n;
        if (tmpN > 0) {
@@ -895,10 +915,12 @@ public class HarFileSystem extends FilterFileSystem {
        return (tmpN < 0)? -1 : 0;
       }
       
+      @Override
       public synchronized long getPos() throws IOException {
        return (position - start);
       }
       
+      @Override
       public synchronized void seek(long pos) throws IOException {
        if (pos < 0 || (start + pos > end)) {
          throw new IOException("Failed to seek: EOF");
@@ -907,6 +929,7 @@ public class HarFileSystem extends FilterFileSystem {
        underLyingStream.seek(position);
       }
 
+      @Override
       public boolean seekToNewSource(long targetPos) throws IOException {
        //do not need to implement this
        // hdfs in itself does seektonewsource 
@@ -917,6 +940,7 @@ public class HarFileSystem extends FilterFileSystem {
       /**
        * implementing position readable. 
        */
+      @Override
       public int read(long pos, byte[] b, int offset, int length) 
       throws IOException {
        int nlength = length;
@@ -929,6 +953,7 @@ public class HarFileSystem extends FilterFileSystem {
       /**
        * position readable again.
       */
+      @Override
       public void readFully(long pos, byte[] b, int offset, int length) 
       throws IOException {
        if (start + length + pos > end) {
@@ -937,6 +962,7 @@ public class HarFileSystem extends FilterFileSystem {
        underLyingStream.readFully(pos + start, b, offset, length);
       }
       
+      @Override
       public void readFully(long pos, byte[] b) throws IOException {
          readFully(pos, b, 0, b.length);
       }
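
Reviewer note: the HarFileSystem inner stream annotated above exposes a [start, end) slice of a larger archive file: every position the caller sees is shifted by 'start', and reads are clamped at 'end'. A stand-alone sketch of that offset arithmetic, with RandomAccessFile as the backing store (class name hypothetical):

import java.io.IOException;
import java.io.RandomAccessFile;

class SliceReader {
  private final RandomAccessFile file;
  private final long start;
  private final long end;   // exclusive

  SliceReader(RandomAccessFile file, long start, long length)
      throws IOException {
    this.file = file;
    this.start = start;
    this.end = start + length;
    file.seek(start);
  }

  synchronized long getPos() throws IOException {
    return file.getFilePointer() - start;  // caller sees slice-relative offsets
  }

  synchronized void seek(long pos) throws IOException {
    if (pos < 0 || start + pos > end) {
      throw new IOException("Failed to seek: EOF");  // same check as the diff
    }
    file.seek(start + pos);
  }

  synchronized int read(byte[] b, int off, int len) throws IOException {
    long remaining = end - file.getFilePointer();
    if (remaining <= 0) {
      return -1;                           // clamp at the slice boundary
    }
    int n = (int) Math.min(len, remaining);
    return file.read(b, off, n);
  }
}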

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java

@@ -91,6 +91,7 @@ public class LocalFileSystem extends ChecksumFileSystem {
    * Moves files to a bad file directory on the same device, so that their
    * storage will not be reused.
    */
+  @Override
   public boolean reportChecksumFailure(Path p, FSDataInputStream in,
                                        long inPos,
                                        FSDataInputStream sums, long sumsPos) {

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java

@@ -94,6 +94,7 @@ public class LocatedFileStatus extends FileStatus {
   * @throws ClassCastException if the specified object's is not of 
   *         type FileStatus
   */
+  @Override
  public int compareTo(Object o) {
    return super.compareTo(o);
  }
@@ -102,6 +103,7 @@ public class LocatedFileStatus extends FileStatus {
   * @param   o the object to be compared.
   * @return  true if two file status has the same path name; false if not.
   */
+  @Override
  public boolean equals(Object o) {
    return super.equals(o);
  }
@@ -112,6 +114,7 @@ public class LocatedFileStatus extends FileStatus {
   *
   * @return  a hash code value for the path name.
   */
+  @Override
  public int hashCode() {
    return super.hashCode();
  }

+ 10 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java

@@ -57,7 +57,7 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
    this.md5 = md5;
  }
  
-  /** {@inheritDoc} */ 
+  @Override
  public String getAlgorithmName() {
    return "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC +
        getCrcType().name();
@@ -73,11 +73,11 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
 
    throw new IOException("Unknown checksum type in " + algorithm);
  }
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
  public int getLength() {return LENGTH;}
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
  public byte[] getBytes() {
    return WritableUtils.toByteArray(this);
  }
@@ -92,14 +92,14 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
    return new ChecksumOpt(getCrcType(), bytesPerCRC);
  }
 
-  /** {@inheritDoc} */ 
+  @Override
  public void readFields(DataInput in) throws IOException {
    bytesPerCRC = in.readInt();
    crcPerBlock = in.readLong();
    md5 = MD5Hash.read(in);
  }
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(bytesPerCRC);
    out.writeLong(crcPerBlock);
@@ -161,8 +161,8 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
          + ", md5=" + md5, e);
    }
  }
-
-  /** {@inheritDoc} */ 
+ 
+  @Override
  public String toString() {
    return getAlgorithmName() + ":" + md5;
  }

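The MD5MD5CRC32FileChecksum hunks are the first of several below that swap a bare /** {@inheritDoc} */ comment for @Override. The two are not equivalent: the javadoc tag only pulls the parent's documentation into generated docs, while the annotation is verified by the compiler, which rejects any method that no longer overrides anything after a signature change upstream. A small illustration:

    // {@inheritDoc} is documentation-only; @Override is compiler-checked.
    class Base {
        /** Returns a display name. */
        public String name() { return "base"; }
    }

    class Derived extends Base {
        /** {@inheritDoc} */   // affects javadoc output only
        @Override              // fails to compile if Base.name() is renamed
        public String name() { return "derived"; }
    }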
+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java

@@ -22,7 +22,6 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.HadoopIllegalArgumentException;
 
 /**
  * This class contains options related to file system operations.

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java

@@ -261,6 +261,7 @@ public class Path implements Comparable {
    return new Path(getParent(), getName()+suffix);
  }
 
+  @Override
  public String toString() {
    // we can't use uri.toString(), which escapes everything, because we want
    // illegal characters unescaped in the string, for glob processing, etc.
@@ -289,6 +290,7 @@ public class Path implements Comparable {
    return buffer.toString();
  }
 
+  @Override
  public boolean equals(Object o) {
    if (!(o instanceof Path)) {
      return false;
@@ -297,10 +299,12 @@ public class Path implements Comparable {
    return this.uri.equals(that.uri);
  }
 
+  @Override
  public int hashCode() {
    return uri.hashCode();
  }
 
+  @Override
  public int compareTo(Object o) {
    Path that = (Path)o;
    return this.uri.compareTo(that.uri);

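Worth noting in the Path hunk: equals, hashCode, and compareTo all delegate to the wrapped URI, which keeps the three mutually consistent (equal paths hash alike and compare as zero). A stripped-down sketch of the same delegation, with an illustrative class name:

    import java.net.URI;

    // All three identity methods defer to the same field, so the
    // equals/hashCode contract and compareTo consistency come for free.
    final class UriKey implements Comparable<UriKey> {
        private final URI uri;

        UriKey(URI uri) { this.uri = uri; }

        @Override
        public boolean equals(Object o) {
            return (o instanceof UriKey) && uri.equals(((UriKey) o).uri);
        }

        @Override
        public int hashCode() { return uri.hashCode(); }

        @Override
        public int compareTo(UriKey that) { return uri.compareTo(that.uri); }
    }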
+ 30 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -72,8 +72,10 @@ public class RawLocalFileSystem extends FileSystem {
    return new File(path.toUri().getPath());
  }
 
+  @Override
  public URI getUri() { return NAME; }
  
+  @Override
  public void initialize(URI uri, Configuration conf) throws IOException {
    super.initialize(uri, conf);
    setConf(conf);
@@ -84,6 +86,7 @@ public class RawLocalFileSystem extends FileSystem {
      super(f);
    }
    
+    @Override
    public int read() throws IOException {
      int result = super.read();
      if (result != -1) {
@@ -92,6 +95,7 @@ public class RawLocalFileSystem extends FileSystem {
      return result;
    }
    
+    @Override
    public int read(byte[] data) throws IOException {
      int result = super.read(data);
      if (result != -1) {
@@ -100,6 +104,7 @@ public class RawLocalFileSystem extends FileSystem {
      return result;
    }
    
+    @Override
    public int read(byte[] data, int offset, int length) throws IOException {
      int result = super.read(data, offset, length);
      if (result != -1) {
@@ -120,15 +125,18 @@ public class RawLocalFileSystem extends FileSystem {
      this.fis = new TrackingFileInputStream(pathToFile(f));
    }
    
+    @Override
    public void seek(long pos) throws IOException {
      fis.getChannel().position(pos);
      this.position = pos;
    }
    
+    @Override
    public long getPos() throws IOException {
      return this.position;
    }
    
+    @Override
    public boolean seekToNewSource(long targetPos) throws IOException {
      return false;
    }
@@ -136,11 +144,14 @@ public class RawLocalFileSystem extends FileSystem {
    /*
     * Just forward to the fis
     */
+    @Override
    public int available() throws IOException { return fis.available(); }
+    @Override
    public void close() throws IOException { fis.close(); }
    @Override
    public boolean markSupported() { return false; }
    
+    @Override
    public int read() throws IOException {
      try {
        int value = fis.read();
@@ -153,6 +164,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
    
+    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      try {
        int value = fis.read(b, off, len);
@@ -165,6 +177,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
    
+    @Override
    public int read(long position, byte[] b, int off, int len)
      throws IOException {
      ByteBuffer bb = ByteBuffer.wrap(b, off, len);
@@ -175,6 +188,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
    
+    @Override
    public long skip(long n) throws IOException {
      long value = fis.skip(n);
      if (value > 0) {
@@ -189,6 +203,7 @@ public class RawLocalFileSystem extends FileSystem {
    }
  }
  
+  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    if (!exists(f)) {
      throw new FileNotFoundException(f.toString());
@@ -210,8 +225,11 @@ public class RawLocalFileSystem extends FileSystem {
    /*
     * Just forward to the fos
     */
+    @Override
    public void close() throws IOException { fos.close(); }
+    @Override
    public void flush() throws IOException { fos.flush(); }
+    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      try {
        fos.write(b, off, len);
@@ -220,6 +238,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
    
+    @Override
    public void write(int b) throws IOException {
      try {
        fos.write(b);
@@ -229,7 +248,7 @@ public class RawLocalFileSystem extends FileSystem {
    }
  }
 
-  /** {@inheritDoc} */
+  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress) throws IOException {
    if (!exists(f)) {
@@ -242,7 +261,6 @@ public class RawLocalFileSystem extends FileSystem {
        new LocalFSFileOutputStream(f, true), bufferSize), statistics);
  }
 
-  /** {@inheritDoc} */
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
    short replication, long blockSize, Progressable progress)
@@ -264,7 +282,6 @@ public class RawLocalFileSystem extends FileSystem {
        new LocalFSFileOutputStream(f, false), bufferSize), statistics);
  }
 
-  /** {@inheritDoc} */
  @Override
  public FSDataOutputStream create(Path f, FsPermission permission,
    boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -276,7 +293,6 @@ public class RawLocalFileSystem extends FileSystem {
    return out;
  }
 
-  /** {@inheritDoc} */
  @Override
  public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
      boolean overwrite,
@@ -288,6 +304,7 @@ public class RawLocalFileSystem extends FileSystem {
    return out;
  }
 
+  @Override
  public boolean rename(Path src, Path dst) throws IOException {
    if (pathToFile(src).renameTo(pathToFile(dst))) {
      return true;
@@ -302,6 +319,7 @@ public class RawLocalFileSystem extends FileSystem {
   * @return true if the file or directory and all its contents were deleted
   * @throws IOException if p is non-empty and recursive is false 
   */
+  @Override
  public boolean delete(Path p, boolean recursive) throws IOException {
    File f = pathToFile(p);
    if (f.isFile()) {
@@ -319,6 +337,7 @@ public class RawLocalFileSystem extends FileSystem {
   * (<b>Note</b>: Returned list is not sorted in any given order,
   * due to reliance on Java's {@link File#list()} API.)
   */
+  @Override
  public FileStatus[] listStatus(Path f) throws IOException {
    File localf = pathToFile(f);
    FileStatus[] results;
@@ -356,6 +375,7 @@ public class RawLocalFileSystem extends FileSystem {
   * Creates the specified directory hierarchy. Does not
   * treat existence as an error.
   */
+  @Override
  public boolean mkdirs(Path f) throws IOException {
    if(f == null) {
      throw new IllegalArgumentException("mkdirs path arg is null");
@@ -373,7 +393,6 @@ public class RawLocalFileSystem extends FileSystem {
      (p2f.mkdir() || p2f.isDirectory());
  }
 
-  /** {@inheritDoc} */
  @Override
  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    boolean b = mkdirs(f);
@@ -418,7 +437,6 @@ public class RawLocalFileSystem extends FileSystem {
    return this.makeQualified(new Path(System.getProperty("user.dir")));
  }
 
-  /** {@inheritDoc} */
  @Override
  public FsStatus getStatus(Path p) throws IOException {
    File partition = pathToFile(p == null ? new Path("/") : p);
@@ -430,29 +448,35 @@ public class RawLocalFileSystem extends FileSystem {
  }
  
  // In the case of the local filesystem, we can just rename the file.
+  @Override
  public void moveFromLocalFile(Path src, Path dst) throws IOException {
    rename(src, dst);
  }
  
  // We can write output directly to the final location
+  @Override
  public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
    throws IOException {
    return fsOutputFile;
  }
  
  // It's in the right place - nothing to do.
+  @Override
  public void completeLocalOutput(Path fsWorkingFile, Path tmpLocalFile)
    throws IOException {
  }
  
+  @Override
  public void close() throws IOException {
    super.close();
  }
  
+  @Override
  public String toString() {
    return "LocalFS";
  }
  
+  @Override
  public FileStatus getFileStatus(Path f) throws IOException {
    File path = pathToFile(f);
    if (path.exists()) {

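The TrackingFileInputStream hunks above show the shape of the statistics plumbing: each read override passes through to the real stream and, on success, credits the number of bytes returned. A self-contained sketch of that wrapper, with an AtomicLong standing in for Hadoop's FileSystem.Statistics (the class name is illustrative):

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.concurrent.atomic.AtomicLong;

    // Every successful read bumps a shared byte counter.
    class CountingInputStream extends FilterInputStream {
        private final AtomicLong bytesRead;

        CountingInputStream(InputStream in, AtomicLong bytesRead) {
            super(in);
            this.bytesRead = bytesRead;
        }

        @Override
        public int read() throws IOException {
            int result = in.read();
            if (result != -1) {
                bytesRead.incrementAndGet();   // one byte consumed
            }
            return result;
        }

        @Override
        public int read(byte[] data, int offset, int length) throws IOException {
            int result = in.read(data, offset, length);
            if (result != -1) {
                bytesRead.addAndGet(result);   // result bytes consumed
            }
            return result;
        }
    }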
+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

@@ -263,6 +263,7 @@ public class TrashPolicyDefault extends TrashPolicy {
      }
    }
 
+    @Override
    public void run() {
      if (emptierInterval == 0)
        return;                                   // trash disabled

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java

@@ -262,6 +262,7 @@ public class FTPFileSystem extends FileSystem {
  }
 
  /** This optional operation is not yet supported. */
+  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress) throws IOException {
    throw new IOException("Not supported");

+ 9 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java

@@ -51,19 +51,23 @@ public class FTPInputStream extends FSInputStream {
    this.closed = false;
  }
 
+  @Override
  public long getPos() throws IOException {
    return pos;
  }
 
  // We don't support seek.
+  @Override
  public void seek(long pos) throws IOException {
    throw new IOException("Seek not supported");
  }
 
+  @Override
  public boolean seekToNewSource(long targetPos) throws IOException {
    throw new IOException("Seek not supported");
  }
 
+  @Override
  public synchronized int read() throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
@@ -79,6 +83,7 @@ public class FTPInputStream extends FSInputStream {
    return byteRead;
  }
 
+  @Override
  public synchronized int read(byte buf[], int off, int len) throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
@@ -95,6 +100,7 @@ public class FTPInputStream extends FSInputStream {
    return result;
  }
 
+  @Override
  public synchronized void close() throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
@@ -116,14 +122,17 @@ public class FTPInputStream extends FSInputStream {
 
  // Not supported.
 
+  @Override
  public boolean markSupported() {
    return false;
  }
 
+  @Override
  public void mark(int readLimit) {
    // Do nothing
  }
 
+  @Override
  public void reset() throws IOException {
    throw new IOException("Mark not supported");
  }

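FTPInputStream illustrates the convention for streams that can report but not change their position: reads advance a counter for getPos, while seek, seekToNewSource, and mark/reset throw or report unsupported. A minimal sketch of a forward-only stream in the same spirit (names are illustrative):

    import java.io.IOException;
    import java.io.InputStream;

    // Position is tracked for reporting, but random access is refused.
    class ForwardOnlyStream extends InputStream {
        private final InputStream wrapped;
        private long pos;

        ForwardOnlyStream(InputStream wrapped) { this.wrapped = wrapped; }

        public long getPos() { return pos; }

        public void seek(long target) throws IOException {
            throw new IOException("Seek not supported");
        }

        @Override
        public int read() throws IOException {
            int b = wrapped.read();
            if (b != -1) pos++;        // only advance on a real byte
            return b;
        }

        @Override
        public boolean markSupported() { return false; }

        @Override
        public void close() throws IOException { wrapped.close(); }
    }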
+ 17 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java

@@ -50,22 +50,27 @@ class KFSImpl implements IFSImpl {
        statistics = stats;
    }
 
+    @Override
    public boolean exists(String path) throws IOException {
        return kfsAccess.kfs_exists(path);
    }
 
+    @Override
    public boolean isDirectory(String path) throws IOException {
        return kfsAccess.kfs_isDirectory(path);
    }
 
+    @Override
    public boolean isFile(String path) throws IOException {
        return kfsAccess.kfs_isFile(path);
    }
 
+    @Override
    public String[] readdir(String path) throws IOException {
        return kfsAccess.kfs_readdir(path);
    }
 
+    @Override
    public FileStatus[] readdirplus(Path path) throws IOException {
        String srep = path.toUri().getPath();
        KfsFileAttr[] fattr = kfsAccess.kfs_readdirplus(srep);
@@ -100,52 +105,64 @@ class KFSImpl implements IFSImpl {
    }
 
 
+    @Override
    public int mkdirs(String path) throws IOException {
        return kfsAccess.kfs_mkdirs(path);
    }
 
+    @Override
    public int rename(String source, String dest) throws IOException {
        return kfsAccess.kfs_rename(source, dest);
    }
 
+    @Override
    public int rmdir(String path) throws IOException {
        return kfsAccess.kfs_rmdir(path);
    }
 
+    @Override
    public int remove(String path) throws IOException {
        return kfsAccess.kfs_remove(path);
    }
 
+    @Override
    public long filesize(String path) throws IOException {
        return kfsAccess.kfs_filesize(path);
    }
 
+    @Override
    public short getReplication(String path) throws IOException {
        return kfsAccess.kfs_getReplication(path);
    }
 
+    @Override
    public short setReplication(String path, short replication) throws IOException {
        return kfsAccess.kfs_setReplication(path, replication);
    }
 
+    @Override
    public String[][] getDataLocation(String path, long start, long len) throws IOException {
        return kfsAccess.kfs_getDataLocation(path, start, len);
    }
 
+    @Override
    public long getModificationTime(String path) throws IOException {
        return kfsAccess.kfs_getModificationTime(path);
    }
 
+    @Override
    public FSDataInputStream open(String path, int bufferSize) throws IOException {
        return new FSDataInputStream(new KFSInputStream(kfsAccess, path, 
                                                        statistics));
    }
 
+    @Override
    public FSDataOutputStream create(String path, short replication, int bufferSize, Progressable progress) throws IOException {
        return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, replication, false, progress), 
                                      statistics);
    }
 
+    @Override
    public FSDataOutputStream append(String path, int bufferSize, Progressable progress) throws IOException {
        // when opening for append, # of replicas is ignored
        return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, (short) 1, true, progress), 

+ 10 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java

@@ -53,6 +53,7 @@ class KFSInputStream extends FSInputStream {
            this.fsize = 0;
    }
 
+    @Override
    public long getPos() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -60,6 +61,7 @@ class KFSInputStream extends FSInputStream {
        return kfsChannel.tell();
    }
 
+    @Override
    public synchronized int available() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -67,6 +69,7 @@ class KFSInputStream extends FSInputStream {
        return (int) (this.fsize - getPos());
    }
 
+    @Override
    public synchronized void seek(long targetPos) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -74,10 +77,12 @@ class KFSInputStream extends FSInputStream {
        kfsChannel.seek(targetPos);
    }
 
+    @Override
    public synchronized boolean seekToNewSource(long targetPos) throws IOException {
        return false;
    }
 
+    @Override
    public synchronized int read() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -93,6 +98,7 @@ class KFSInputStream extends FSInputStream {
        return -1;
    }
 
+    @Override
    public synchronized int read(byte b[], int off, int len) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -109,6 +115,7 @@ class KFSInputStream extends FSInputStream {
 	return res;
    }
 
+    @Override
    public synchronized void close() throws IOException {
        if (kfsChannel == null) {
            return;
@@ -118,14 +125,17 @@ class KFSInputStream extends FSInputStream {
        kfsChannel = null;
    }
 
+    @Override
    public boolean markSupported() {
        return false;
    }
 
+    @Override
    public void mark(int readLimit) {
        // Do nothing
    }
 
+    @Override
    public void reset() throws IOException {
        throw new IOException("Mark not supported");
    }

+ 4 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java

@@ -20,15 +20,10 @@
 package org.apache.hadoop.fs.kfs;
 
 import java.io.*;
-import java.net.*;
-import java.util.*;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.util.Progressable;
 
 import org.kosmix.kosmosfs.access.KfsAccess;
@@ -60,6 +55,7 @@ class KFSOutputStream extends OutputStream {
        return kfsChannel.tell();
    }
 
+    @Override
    public void write(int v) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -70,6 +66,7 @@ class KFSOutputStream extends OutputStream {
        write(b, 0, 1);
    }
 
+    @Override
    public void write(byte b[], int off, int len) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -80,6 +77,7 @@ class KFSOutputStream extends OutputStream {
        kfsChannel.write(ByteBuffer.wrap(b, off, len));
    }
 
+    @Override
    public void flush() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -89,6 +87,7 @@ class KFSOutputStream extends OutputStream {
        kfsChannel.sync();
    }
 
+    @Override
    public synchronized void close() throws IOException {
        if (kfsChannel == null) {
            return;

+ 8 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java

@@ -40,6 +40,7 @@ public class FsPermission implements Writable {
  private static final Log LOG = LogFactory.getLog(FsPermission.class);
 
  static final WritableFactory FACTORY = new WritableFactory() {
+    @Override
    public Writable newInstance() { return new FsPermission(); }
  };
  static {                                      // register a ctor
@@ -124,12 +125,12 @@ public class FsPermission implements Writable {
    set(v[(n >>> 6) & 7], v[(n >>> 3) & 7], v[n & 7], (((n >>> 9) & 1) == 1) );
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeShort(toShort());
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void readFields(DataInput in) throws IOException {
    fromShort(in.readShort());
  }
@@ -155,7 +156,7 @@ public class FsPermission implements Writable {
    return (short)s;
  }
 
-  /** {@inheritDoc} */
+  @Override
  public boolean equals(Object obj) {
    if (obj instanceof FsPermission) {
      FsPermission that = (FsPermission)obj;
@@ -167,10 +168,10 @@ public class FsPermission implements Writable {
    return false;
  }
 
-  /** {@inheritDoc} */
+  @Override
  public int hashCode() {return toShort();}
 
-  /** {@inheritDoc} */
+  @Override
  public String toString() {
    String str = useraction.SYMBOL + groupaction.SYMBOL + otheraction.SYMBOL;
    if(stickyBit) {
@@ -300,9 +301,11 @@ public class FsPermission implements Writable {
    public ImmutableFsPermission(short permission) {
      super(permission);
    }
+    @Override
    public FsPermission applyUMask(FsPermission umask) {
      throw new UnsupportedOperationException();
    }
+    @Override
    public void readFields(DataInput in) throws IOException {
      throw new UnsupportedOperationException();
    }    

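The set(...) call visible in the FsPermission hunk decodes a classic Unix mode layout: three 3-bit rwx fields for user, group, and other, with the sticky bit one position above them (bit 9). A standalone illustration of the same shifts and masks:

    // Decoding a packed permission short: user at bits 6-8, group at 3-5,
    // other at 0-2, sticky at bit 9.
    public class ModeBits {
        public static void main(String[] args) {
            short mode = 01755;                        // octal: sticky + rwxr-xr-x
            int user   = (mode >>> 6) & 7;             // 7 -> rwx
            int group  = (mode >>> 3) & 7;             // 5 -> r-x
            int other  =  mode        & 7;             // 5 -> r-x
            boolean sticky = ((mode >>> 9) & 1) == 1;  // true
            System.out.printf("user=%o group=%o other=%o sticky=%b%n",
                              user, group, other, sticky);
        }
    }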
+ 6 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java

@@ -32,6 +32,7 @@ import java.io.IOException;
 @InterfaceStability.Unstable
 public class PermissionStatus implements Writable {
  static final WritableFactory FACTORY = new WritableFactory() {
+    @Override
    public Writable newInstance() { return new PermissionStatus(); }
  };
  static {                                      // register a ctor
@@ -42,9 +43,11 @@ public class PermissionStatus implements Writable {
  public static PermissionStatus createImmutable(
      String user, String group, FsPermission permission) {
    return new PermissionStatus(user, group, permission) {
+      @Override
      public PermissionStatus applyUMask(FsPermission umask) {
        throw new UnsupportedOperationException();
      }
+      @Override
      public void readFields(DataInput in) throws IOException {
        throw new UnsupportedOperationException();
      }
@@ -82,14 +85,14 @@ public class PermissionStatus implements Writable {
    return this;
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void readFields(DataInput in) throws IOException {
    username = Text.readString(in, Text.DEFAULT_MAX_LEN);
    groupname = Text.readString(in, Text.DEFAULT_MAX_LEN);
    permission = FsPermission.read(in);
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void write(DataOutput out) throws IOException {
    write(out, username, groupname, permission);
  }
@@ -115,7 +118,7 @@ public class PermissionStatus implements Writable {
    permission.write(out);
  }
 
-  /** {@inheritDoc} */
+  @Override
  public String toString() {
    return username + ":" + groupname + ":" + permission;
  }

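PermissionStatus also shows the Writable discipline that the @Override sweep makes easy to verify: readFields must consume exactly what write produced, field by field, in the same order. A sketch of that symmetry using plain java.io types (writeUTF stands in for Hadoop's Text.writeString; the Record class is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // write() and readFields() must mirror each other exactly.
    public class RecordDemo {
        static class Record {
            String user, group;
            short mode;

            void write(DataOutputStream out) throws IOException {
                out.writeUTF(user);
                out.writeUTF(group);
                out.writeShort(mode);
            }

            void readFields(DataInputStream in) throws IOException {
                user = in.readUTF();      // same order as write()
                group = in.readUTF();
                mode = in.readShort();
            }
        }

        public static void main(String[] args) throws IOException {
            Record r = new Record();
            r.user = "alice"; r.group = "users"; r.mode = 0644;
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            r.write(new DataOutputStream(bytes));

            Record copy = new Record();
            copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(copy.user + ":" + copy.group + ":" + Integer.toOctalString(copy.mode));
        }
    }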
+ 14 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java

@@ -83,6 +83,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
  private static final Log LOG = 
    LogFactory.getLog(Jets3tFileSystemStore.class.getName());
  
+  @Override
  public void initialize(URI uri, Configuration conf) throws IOException {
    
    this.conf = conf;
@@ -108,6 +109,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
		      );
  }
 
+  @Override
  public String getVersion() throws IOException {
    return FILE_SYSTEM_VERSION_VALUE;
  }
@@ -123,14 +125,17 @@ class Jets3tFileSystemStore implements FileSystemStore {
    }
  }
 
+  @Override
  public void deleteINode(Path path) throws IOException {
    delete(pathToKey(path));
  }
 
+  @Override
  public void deleteBlock(Block block) throws IOException {
    delete(blockToKey(block));
  }
 
+  @Override
  public boolean inodeExists(Path path) throws IOException {
    InputStream in = get(pathToKey(path), true);
    if (in == null) {
@@ -140,6 +145,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
    return true;
  }
  
+  @Override
  public boolean blockExists(long blockId) throws IOException {
    InputStream in = get(blockToKey(blockId), false);
    if (in == null) {
@@ -203,10 +209,12 @@ class Jets3tFileSystemStore implements FileSystemStore {
    }
  }
 
+  @Override
  public INode retrieveINode(Path path) throws IOException {
    return INode.deserialize(get(pathToKey(path), true));
  }
 
+  @Override
  public File retrieveBlock(Block block, long byteRangeStart)
    throws IOException {
    File fileBlock = null;
@@ -249,6 +257,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
    return result;
  }
 
+  @Override
  public Set<Path> listSubPaths(Path path) throws IOException {
    try {
      String prefix = pathToKey(path);
@@ -270,6 +279,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
    }
  }
  
+  @Override
  public Set<Path> listDeepSubPaths(Path path) throws IOException {
    try {
      String prefix = pathToKey(path);
@@ -311,10 +321,12 @@ class Jets3tFileSystemStore implements FileSystemStore {
    }
  }
 
+  @Override
  public void storeINode(Path path, INode inode) throws IOException {
    put(pathToKey(path), inode.serialize(), inode.getSerializedLength(), true);
  }
 
+  @Override
  public void storeBlock(Block block, File file) throws IOException {
    BufferedInputStream in = null;
    try {
@@ -354,6 +366,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
    return blockToKey(block.getId());
  }
 
+  @Override
  public void purge() throws IOException {
    try {
      S3Object[] objects = s3Service.listObjects(bucket);
@@ -368,6 +381,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
    }
  }
 
+  @Override
  public void dump() throws IOException {
    StringBuilder sb = new StringBuilder("S3 Filesystem, ");
    sb.append(bucket.getName()).append("\n");

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java

@@ -61,6 +61,7 @@ public class MigrationTool extends Configured implements Tool {
    System.exit(res);
  }
  
+  @Override
  public int run(String[] args) throws Exception {
    
    if (args.length == 0) {
@@ -195,6 +196,7 @@ public class MigrationTool extends Configured implements Tool {
  
  class UnversionedStore implements Store {
 
+    @Override
    public Set<Path> listAllPaths() throws IOException {
      try {
        String prefix = urlEncode(Path.SEPARATOR);
@@ -212,6 +214,7 @@ public class MigrationTool extends Configured implements Tool {
      }   
    }
 
+    @Override
    public void deleteINode(Path path) throws IOException {
      delete(pathToKey(path));
    }
@@ -227,6 +230,7 @@ public class MigrationTool extends Configured implements Tool {
      }
    }
    
+    @Override
    public INode retrieveINode(Path path) throws IOException {
      return INode.deserialize(get(pathToKey(path)));
    }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java

@@ -206,6 +206,7 @@ public class S3FileSystem extends FileSystem {
  }
 
  /** This optional operation is not yet supported. */
+  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress) throws IOException {
    throw new IOException("Not supported");
@@ -298,6 +299,7 @@ public class S3FileSystem extends FileSystem {
    return true;
  }
 
+  @Override
  public boolean delete(Path path, boolean recursive) throws IOException {
   Path absolutePath = makeAbsolute(path);
   INode inode = store.retrieveINode(absolutePath);

+ 12 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java

@@ -49,6 +49,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
  private S3Service s3Service;
  private S3Bucket bucket;
  
+  @Override
  public void initialize(URI uri, Configuration conf) throws IOException {
    S3Credentials s3Credentials = new S3Credentials();
    s3Credentials.initialize(uri, conf);
@@ -63,6 +64,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    bucket = new S3Bucket(uri.getHost());
  }
  
+  @Override
  public void storeFile(String key, File file, byte[] md5Hash)
    throws IOException {
    
@@ -90,6 +92,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void storeEmptyFile(String key) throws IOException {
    try {
      S3Object object = new S3Object(key);
@@ -102,6 +105,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
  
+  @Override
  public FileMetadata retrieveMetadata(String key) throws IOException {
    try {
      S3Object object = s3Service.getObjectDetails(bucket, key);
@@ -117,6 +121,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
  
+  @Override
  public InputStream retrieve(String key) throws IOException {
    try {
      S3Object object = s3Service.getObject(bucket, key);
@@ -127,6 +132,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
  
+  @Override
  public InputStream retrieve(String key, long byteRangeStart)
    throws IOException {
    try {
@@ -139,11 +145,13 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public PartialListing list(String prefix, int maxListingLength)
    throws IOException {
    return list(prefix, maxListingLength, null, false);
  }
  
+  @Override
  public PartialListing list(String prefix, int maxListingLength, String priorLastKey,
      boolean recurse) throws IOException {
 
@@ -175,6 +183,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void delete(String key) throws IOException {
    try {
      s3Service.deleteObject(bucket, key);
@@ -183,6 +192,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
  
+  @Override
  public void copy(String srcKey, String dstKey) throws IOException {
    try {
      s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
@@ -192,6 +202,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void purge(String prefix) throws IOException {
    try {
      S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
@@ -203,6 +214,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void dump() throws IOException {
    StringBuilder sb = new StringBuilder("S3 Native Filesystem, ");
    sb.append(bucket.getName()).append("\n");

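A pattern running through both Jets3t store classes above: every call into the S3 service is wrapped so that service-level exceptions surface as IOException, keeping the store interfaces IO-centric. A sketch of the idiom with a stand-in exception type (ServiceFailure and RawClient are illustrative; the real code catches jets3t's exception):

    import java.io.IOException;

    // Service-level failures are translated to IOException at the boundary.
    public class StoreDemo {
        static class ServiceFailure extends Exception {
            ServiceFailure(String m) { super(m); }
        }

        interface RawClient {
            void delete(String key) throws ServiceFailure;
        }

        static void delete(RawClient raw, String key) throws IOException {
            try {
                raw.delete(key);
            } catch (ServiceFailure e) {
                throw new IOException("delete failed for " + key, e);  // wrap, keep cause
            }
        }

        public static void main(String[] args) {
            try {
                delete(k -> { throw new ServiceFailure("503 Slow Down"); }, "block_42");
            } catch (IOException e) {
                System.out.println(e.getMessage() + " (cause: " + e.getCause().getMessage() + ")");
            }
        }
    }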
+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java

@@ -150,6 +150,7 @@ public class CommandFormat {
      actual = got;
    }
 
+    @Override
    public String getMessage() {
      return "expected " + expected + " but got " + actual;
    }
@@ -165,6 +166,7 @@ public class CommandFormat {
      super(expected, actual);
    }
 
+    @Override
    public String getMessage() {
      return "Too many arguments: " + super.getMessage();
    }
@@ -180,6 +182,7 @@ public class CommandFormat {
      super(expected, actual);
    }
 
+    @Override
    public String getMessage() {
      return "Not enough arguments: " + super.getMessage();
    }

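The CommandFormat hunk annotates a small decorator chain: each exception subclass prefixes the parent's message rather than rebuilding it, so the expected/actual counts are formatted in exactly one place. In outline (simplified from the surrounding class; the fields and base-class name are illustrative):

    // The base class formats the counts; subclasses only add a prefix.
    class ArgumentCountException extends IllegalArgumentException {
        final int expected, actual;

        ArgumentCountException(int expected, int actual) {
            this.expected = expected;
            this.actual = actual;
        }

        @Override
        public String getMessage() {
            return "expected " + expected + " but got " + actual;
        }
    }

    class TooManyArgumentsException extends ArgumentCountException {
        TooManyArgumentsException(int expected, int actual) {
            super(expected, actual);
        }

        @Override
        public String getMessage() {
            return "Too many arguments: " + super.getMessage();  // decorate, don't rebuild
        }
    }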
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java

@@ -114,6 +114,7 @@ class Delete {
  static class Rmr extends Rm {
    public static final String NAME = "rmr";
    
+    @Override
    protected void processOptions(LinkedList<String> args) throws IOException {
      args.addFirst("-r");
      super.processOptions(args);
@@ -136,6 +137,7 @@ class Delete {
    
    private boolean ignoreNonEmpty = false;
    
+    @Override
    protected void processOptions(LinkedList<String> args) throws IOException {
      CommandFormat cf = new CommandFormat(
          1, Integer.MAX_VALUE, "-ignore-fail-on-non-empty");

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java

@@ -161,6 +161,7 @@ class Display extends FsCommand {
      outbuf = new DataOutputBuffer();
    }
 
+    @Override
    public int read() throws IOException {
      int ret;
      if (null == inbuf || -1 == (ret = inbuf.read())) {
@@ -180,6 +181,7 @@ class Display extends FsCommand {
      return ret;
    }
 
+    @Override
    public void close() throws IOException {
      r.close();
      super.close();

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java

@@ -73,6 +73,7 @@ abstract public class FsCommand extends Command {
  
  // abstract method that normally is invoked by runall() which is
  // overridden below
+  @Override
  protected void run(Path path) throws IOException {
    throw new RuntimeException("not supposed to get here");
  }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

@@ -380,6 +380,7 @@ public class PathData implements Comparable<PathData> {
   * as given on the commandline, or the full path
   * @return String of the path
   */
+  @Override
  public String toString() {
    String scheme = uri.getScheme();
    // No interpretation of symbols. Just decode % escaped chars.

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java

@@ -102,6 +102,7 @@ class ChRootedFileSystem extends FilterFileSystem {
   *   for this FileSystem
   * @param conf the configuration
   */
+  @Override
  public void initialize(final URI name, final Configuration conf)
      throws IOException {
    super.initialize(name, conf);

+ 0 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java

@@ -20,10 +20,6 @@ package org.apache.hadoop.fs.viewfs;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-
 import org.apache.hadoop.fs.Path;
 
 /**

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

@@ -164,6 +164,7 @@ public class ViewFileSystem extends FileSystem {
   *          this FileSystem
   * @param conf the configuration
   */
+  @Override
  public void initialize(final URI theUri, final Configuration conf)
      throws IOException {
    super.initialize(theUri, conf);

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java

@@ -42,7 +42,8 @@ class ViewFsFileStatus extends FileStatus {
     return super.equals(o);
   }
   
-   public int hashCode() {
+   @Override
+  public int hashCode() {
     return super.hashCode();
   }
   

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java

@@ -892,6 +892,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
      final List<ACL> acl, final CreateMode mode)
      throws InterruptedException, KeeperException {
    return zkDoWithRetries(new ZKAction<String>() {
+      @Override
      public String run() throws KeeperException, InterruptedException {
        return zkClient.create(path, data, acl, mode);
      }
@@ -901,6 +902,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
  private byte[] getDataWithRetries(final String path, final boolean watch,
      final Stat stat) throws InterruptedException, KeeperException {
    return zkDoWithRetries(new ZKAction<byte[]>() {
+      @Override
      public byte[] run() throws KeeperException, InterruptedException {
        return zkClient.getData(path, watch, stat);
      }
@@ -910,6 +912,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
  private Stat setDataWithRetries(final String path, final byte[] data,
      final int version) throws InterruptedException, KeeperException {
    return zkDoWithRetries(new ZKAction<Stat>() {
+      @Override
      public Stat run() throws KeeperException, InterruptedException {
        return zkClient.setData(path, data, version);
      }
@@ -919,6 +922,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
  private void deleteWithRetries(final String path, final int version)
      throws KeeperException, InterruptedException {
    zkDoWithRetries(new ZKAction<Void>() {
+      @Override
      public Void run() throws KeeperException, InterruptedException {
        zkClient.delete(path, version);
        return null;
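
Each hunk above annotates the run() method of an anonymous ZKAction: one ZooKeeper call is wrapped per action so zkDoWithRetries can re-execute it on transient failure. A simplified sketch of that shape (Action and doWithRetries below are stand-ins, not the exact Hadoop signatures):

    interface Action<T> {
      T run() throws Exception;
    }

    static <T> T doWithRetries(Action<T> action, int maxRetries) throws Exception {
      Exception last = null;
      for (int attempt = 0; attempt < maxRetries; attempt++) {
        try {
          return action.run();   // re-executes the wrapped call on each attempt
        } catch (Exception e) {
          last = e;              // assume transient; retry
        }
      }
      throw last;                // retries exhausted
    }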

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java

@@ -56,6 +56,7 @@ public interface HAServiceProtocol {
      this.name = name;
    }

+    @Override
    public String toString() {
      return name;
    }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java

@@ -184,6 +184,7 @@ public class NodeFencer {
      this.arg = arg;
    }
    
+    @Override
    public String toString() {
      return method.getClass().getCanonicalName() + "(" + arg + ")";
    }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java

@@ -274,6 +274,7 @@ public class SshFenceByTcpPort extends Configured
    static final Log LOG = LogFactory.getLog(
        SshFenceByTcpPort.class.getName() + ".jsch");

+    @Override
    public boolean isEnabled(int level) {
      switch (level) {
      case com.jcraft.jsch.Logger.DEBUG:
@@ -291,6 +292,7 @@ public class SshFenceByTcpPort extends Configured
      }
    }
      
+    @Override
    public void log(int level, String message) {
      switch (level) {
      case com.jcraft.jsch.Logger.DEBUG:

+ 4 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java

@@ -55,11 +55,10 @@ public class ZKFCRpcServer implements ZKFCProtocol {
        new ZKFCProtocolServerSideTranslatorPB(this);
    BlockingService service = ZKFCProtocolService
        .newReflectiveBlockingService(translator);
-    this.server = RPC.getServer(
-        ZKFCProtocolPB.class,
-        service, bindAddr.getHostName(),
-            bindAddr.getPort(), HANDLER_COUNT, false, conf,
-            null /*secretManager*/);
+    this.server = new RPC.Builder(conf).setProtocol(ZKFCProtocolPB.class)
+        .setInstance(service).setBindAddress(bindAddr.getHostName())
+        .setPort(bindAddr.getPort()).setNumHandlers(HANDLER_COUNT)
+        .setVerbose(false).build();
    
    // set service-level authorization security policy
    if (conf.getBoolean(
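
The removed RPC.getServer call passed seven positional arguments; the builder added above names each one, so the call site stays legible as parameters grow. Condensed from the added lines (conf, service, and bindAddr are in scope exactly as in the hunk):

    this.server = new RPC.Builder(conf)
        .setProtocol(ZKFCProtocolPB.class)          // protocol interface to serve
        .setInstance(service)                       // BlockingService implementation
        .setBindAddress(bindAddr.getHostName())
        .setPort(bindAddr.getPort())
        .setNumHandlers(HANDLER_COUNT)
        .setVerbose(false)
        .build();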

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

@@ -474,7 +474,7 @@ public class HttpServer implements FilterContainer {
    }
  }

-  /** {@inheritDoc} */
+  @Override
  public void addFilter(String name, String classname,
      Map<String, String> parameters) {

@@ -494,7 +494,7 @@ public class HttpServer implements FilterContainer {
    filterNames.add(name);
  }

-  /** {@inheritDoc} */
+  @Override
  public void addGlobalFilter(String name, String classname,
      Map<String, String> parameters) {
    final String[] ALL_URLS = { "/*" };

+ 4 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java

@@ -164,16 +164,18 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
  }

  /** @return the conf */
+  @Override
  public Configuration getConf() {
    return conf.get();
  }

  /** @param conf the conf to set */
+  @Override
  public void setConf(Configuration conf) {
    this.conf.set(conf);
  }
  
-  /** {@inheritDoc} */
+  @Override
  public void write(DataOutput out) throws IOException {
    
    // First write out the size of the class table and any classes that are
@@ -187,7 +189,7 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
    }
  }
  
-  /** {@inheritDoc} */
+  @Override
  public void readFields(DataInput in) throws IOException {
    
    // Get the number of "unknown" classes

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java

@@ -88,6 +88,7 @@ public class ArrayWritable implements Writable {

  public Writable[] get() { return values; }

+  @Override
  public void readFields(DataInput in) throws IOException {
    values = new Writable[in.readInt()];          // construct values
    for (int i = 0; i < values.length; i++) {
@@ -97,6 +98,7 @@ public class ArrayWritable implements Writable {
    }
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(values.length);                 // write values
    for (int i = 0; i < values.length; i++) {

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java

@@ -57,12 +57,14 @@ public class BooleanWritable implements WritableComparable<BooleanWritable> {

  /**
   */
+  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readBoolean();
  }

  /**
   */
+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeBoolean(value);
  }
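
BooleanWritable and the primitive wrappers that follow all honor the contract these annotations make explicit: readFields(DataInput) must consume exactly the bytes that write(DataOutput) produced, in the same order. A minimal custom Writable in the same style (a hypothetical class, not part of this patch):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    public class PointWritable implements Writable {
      private int x;
      private int y;

      @Override
      public void write(DataOutput out) throws IOException {
        out.writeInt(x);   // field order defines the wire format
        out.writeInt(y);
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        x = in.readInt();  // must read fields back in the identical order
        y = in.readInt();
      }
    }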

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java

@@ -39,10 +39,12 @@ public class ByteWritable implements WritableComparable<ByteWritable> {
  /** Return the value of this ByteWritable. */
  public byte get() { return value; }

+  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readByte();
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeByte(value);
  }

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java

@@ -81,6 +81,7 @@ public class BytesWritable extends BinaryComparable
   * if you need the returned array to be precisely the length of the data.
   * @return The data is only valid between 0 and getLength() - 1.
   */
+  @Override
  public byte[] getBytes() {
    return bytes;
  }
@@ -97,6 +98,7 @@ public class BytesWritable extends BinaryComparable
  /**
   * Get the current size of the buffer.
   */
+  @Override
  public int getLength() {
    return size;
  }
@@ -171,6 +173,7 @@ public class BytesWritable extends BinaryComparable
  }

  // inherit javadoc
+  @Override
  public void readFields(DataInput in) throws IOException {
    setSize(0); // clear the old data
    setSize(in.readInt());
@@ -178,6 +181,7 @@ public class BytesWritable extends BinaryComparable
  }
  
  // inherit javadoc
+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(size);
    out.write(bytes, 0, size);

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java

@@ -45,6 +45,7 @@ public abstract class CompressedWritable implements Writable {

  public CompressedWritable() {}

+  @Override
  public final void readFields(DataInput in) throws IOException {
    compressed = new byte[in.readInt()];
    in.readFully(compressed, 0, compressed.length);
@@ -70,6 +71,7 @@ public abstract class CompressedWritable implements Writable {
  protected abstract void readFieldsCompressed(DataInput in)
    throws IOException;

+  @Override
  public final void write(DataOutput out) throws IOException {
    if (compressed == null) {
      ByteArrayOutputStream deflated = new ByteArrayOutputStream();
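
CompressedWritable defers inflation: readFields only stores the compressed bytes, and subclasses implement readFieldsCompressed (shown in the hunk) plus, by symmetry, a writeCompressed counterpart. A hedged sketch of a subclass, assuming those two hooks and the protected ensureInflated() helper behave as in trunk:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.CompressedWritable;

    public class BigRecord extends CompressedWritable {
      private String payload = "";

      @Override
      protected void writeCompressed(DataOutput out) throws IOException {
        out.writeUTF(payload);   // serialized once, then deflated by the base class
      }

      @Override
      protected void readFieldsCompressed(DataInput in) throws IOException {
        payload = in.readUTF();  // runs lazily, the first time the data is needed
      }

      public String getPayload() {
        ensureInflated();        // triggers the deferred readFieldsCompressed
        return payload;
      }
    }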

+ 0 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.io;
 import java.io.DataInputStream;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
 
 public class DataInputByteBuffer extends DataInputStream {
 

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java

@@ -72,6 +72,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
    }
  }

+  @Override
  public T fromString(String str) throws IOException {
    try {
      byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
@@ -83,6 +84,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
    }
  }

+  @Override
  public String toString(T obj) throws IOException {
    outBuf.reset();
    serializer.serialize(obj);
@@ -91,6 +93,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
    return new String(Base64.encodeBase64(buf));
  }

+  @Override
  public void close() throws IOException {
    inBuf.close();
    outBuf.close();
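
The toString/fromString pair round-trips an object through Base64 text, which is what lets serializable values ride inside a Configuration. A typical round trip via the class's static helpers (assuming the store/load convenience methods present in trunk at this point):

    Configuration conf = new Configuration();
    DefaultStringifier.store(conf, new IntWritable(42), "example.key");   // uses toString()

    IntWritable restored =
        DefaultStringifier.load(conf, "example.key", IntWritable.class);  // uses fromString()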

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java

@@ -42,10 +42,12 @@ public class DoubleWritable implements WritableComparable<DoubleWritable> {
    set(value);
  }
  
+  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readDouble();
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeDouble(value);
  }

+ 8 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java

@@ -23,7 +23,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Iterator;
-import java.util.Collection;
 import java.util.AbstractCollection;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -46,8 +45,11 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
  EnumSetWritable() {
  }

+  @Override
  public Iterator<E> iterator() { return value.iterator(); }
+  @Override
  public int size() { return value.size(); }
+  @Override
  public boolean add(E e) {
    if (value == null) {
      value = EnumSet.of(e);
@@ -109,7 +111,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    return value;
  }

-  /** {@inheritDoc} */
+  @Override
  @SuppressWarnings("unchecked")
  public void readFields(DataInput in) throws IOException {
    int length = in.readInt();
@@ -127,7 +129,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    }
  }

-  /** {@inheritDoc} */
+  @Override
  public void write(DataOutput out) throws IOException {
    if (this.value == null) {
      out.writeInt(-1);
@@ -152,6 +154,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
   * Returns true if <code>o</code> is an EnumSetWritable with the same value,
   * or both are null.
   */
+  @Override
  public boolean equals(Object o) {
    if (o == null) {
      throw new IllegalArgumentException("null argument passed in equal().");
@@ -180,27 +183,25 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    return elementType;
  }

-  /** {@inheritDoc} */
+  @Override
  public int hashCode() {
    if (value == null)
      return 0;
    return (int) value.hashCode();
  }

-  /** {@inheritDoc} */
+  @Override
  public String toString() {
    if (value == null)
      return "(null)";
    return value.toString();
  }

-  /** {@inheritDoc} */
  @Override
  public Configuration getConf() {
    return this.conf;
  }

-  /** {@inheritDoc} */
  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java

@@ -39,10 +39,12 @@ public class FloatWritable implements WritableComparable<FloatWritable> {
  /** Return the value of this FloatWritable. */
  public float get() { return value; }

+  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readFloat();
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeFloat(value);
  }

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java

@@ -114,11 +114,13 @@ public abstract class GenericWritable implements Writable, Configurable {
    return instance;
  }
  
+  @Override
  public String toString() {
    return "GW[" + (instance != null ? ("class=" + instance.getClass().getName() +
        ",value=" + instance.toString()) : "(null)") + "]";
  }

+  @Override
  public void readFields(DataInput in) throws IOException {
    type = in.readByte();
    Class<? extends Writable> clazz = getTypes()[type & 0xff];
@@ -131,6 +133,7 @@ public abstract class GenericWritable implements Writable, Configurable {
    instance.readFields(in);
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    if (type == NOT_SET || instance == null)
      throw new IOException("The GenericWritable has NOT been set correctly. type="
@@ -145,10 +148,12 @@ public abstract class GenericWritable implements Writable, Configurable {
   */
  abstract protected Class<? extends Writable>[] getTypes();

+  @Override
  public Configuration getConf() {
    return conf;
  }

+  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
  }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java

@@ -272,9 +272,11 @@ public class IOUtils {
   * The /dev/null of OutputStreams.
   */
  public static class NullOutputStream extends OutputStream {
+    @Override
    public void write(byte[] b, int off, int len) throws IOException {
    }

+    @Override
    public void write(int b) throws IOException {
    }
  }  

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java

@@ -42,10 +42,12 @@ public class IntWritable implements WritableComparable<IntWritable> {
  /** Return the value of this IntWritable. */
  public int get() { return value; }

+  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readInt();
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(value);
  }

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java

@@ -42,15 +42,18 @@ public class LongWritable implements WritableComparable<LongWritable> {
  /** Return the value of this LongWritable. */
  public long get() { return value; }

+  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readLong();
  }

+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeLong(value);
  }

  /** Returns true iff <code>o</code> is a LongWritable with the same value. */
+  @Override
  public boolean equals(Object o) {
    if (!(o instanceof LongWritable))
      return false;
@@ -58,17 +61,20 @@ public class LongWritable implements WritableComparable<LongWritable> {
    return this.value == other.value;
  }

+  @Override
  public int hashCode() {
    return (int)value;
  }

  /** Compares two LongWritables. */
+  @Override
  public int compareTo(LongWritable o) {
    long thisValue = this.value;
    long thatValue = o.value;
    return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
  }

+  @Override
  public String toString() {
    return Long.toString(value);
  }
@@ -79,6 +85,7 @@ public class LongWritable implements WritableComparable<LongWritable> {
      super(LongWritable.class);
    }

+    @Override
    public int compare(byte[] b1, int s1, int l1,
                       byte[] b2, int s2, int l2) {
      long thisValue = readLong(b1, s1);
@@ -94,6 +101,7 @@ public class LongWritable implements WritableComparable<LongWritable> {
    public int compare(WritableComparable a, WritableComparable b) {
      return -super.compare(a, b);
    }
+    @Override
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      return -super.compare(b1, s1, l1, b2, s2, l2);
    }
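
The Comparator above works on serialized keys in place: readLong(b1, s1) decodes straight out of the byte buffer, so a sort never materializes a LongWritable per comparison. The same raw-comparator shape for a custom key might look like this (PointWritable is the hypothetical key sketched earlier, assumed here to be a WritableComparable whose first serialized field is an int):

    import org.apache.hadoop.io.WritableComparator;

    public class PointComparator extends WritableComparator {
      public PointComparator() {
        super(PointWritable.class);
      }

      @Override
      public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        int x1 = readInt(b1, s1);   // first field sits at the record start
        int x2 = readInt(b2, s2);
        return (x1 < x2 ? -1 : (x1 == x2 ? 0 : 1));
      }
    }

    // Registration, mirroring the static blocks in the Writable classes:
    // WritableComparator.define(PointWritable.class, new PointComparator());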

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java

@@ -36,6 +36,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
  public static final int MD5_LEN = 16;

  private static ThreadLocal<MessageDigest> DIGESTER_FACTORY = new ThreadLocal<MessageDigest>() {
+    @Override
    protected MessageDigest initialValue() {
      try {
        return MessageDigest.getInstance("MD5");
@@ -65,6 +66,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
  }
  
  // javadoc from Writable
+  @Override
  public void readFields(DataInput in) throws IOException {
    in.readFully(digest);
  }
@@ -77,6 +79,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
  }

  // javadoc from Writable
+  @Override
  public void write(DataOutput out) throws IOException {
    out.write(digest);
  }
@@ -155,6 +158,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {

  /** Returns true iff <code>o</code> is an MD5Hash whose digest contains the
   * same values.  */
+  @Override
  public boolean equals(Object o) {
    if (!(o instanceof MD5Hash))
      return false;
@@ -165,12 +169,14 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
  /** Returns a hash code value for this object.
   * Only uses the first 4 bytes, since md5s are evenly distributed.
   */
+  @Override
  public int hashCode() {
    return quarterDigest();
  }


  /** Compares this object with the specified object for order.*/
+  @Override
  public int compareTo(MD5Hash that) {
    return WritableComparator.compareBytes(this.digest, 0, MD5_LEN,
                                           that.digest, 0, MD5_LEN);
@@ -182,6 +188,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
      super(MD5Hash.class);
    }

+    @Override
    public int compare(byte[] b1, int s1, int l1,
                       byte[] b2, int s2, int l2) {
      return compareBytes(b1, s1, MD5_LEN, b2, s2, MD5_LEN);
@@ -196,6 +203,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
  {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};

  /** Returns a string representation of this object. */
+  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(MD5_LEN*2);
    for (int i = 0; i < MD5_LEN; i++) {

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java

@@ -296,6 +296,7 @@ public class MapFile {
    }

    /** Close the map. */
+    @Override
    public synchronized void close() throws IOException {
      data.close();
      index.close();
@@ -723,6 +724,7 @@ public class MapFile {
    }

    /** Close the map. */
+    @Override
    public synchronized void close() throws IOException {
      if (!indexClosed) {
        index.close();

+ 14 - 16
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java

@@ -55,27 +55,27 @@ public class MapWritable extends AbstractMapWritable
    copy(other);
  }
  
-  /** {@inheritDoc} */
+  @Override
  public void clear() {
    instance.clear();
  }

-  /** {@inheritDoc} */
+  @Override
  public boolean containsKey(Object key) {
    return instance.containsKey(key);
  }

-  /** {@inheritDoc} */
+  @Override
  public boolean containsValue(Object value) {
    return instance.containsValue(value);
  }

-  /** {@inheritDoc} */
+  @Override
  public Set<Map.Entry<Writable, Writable>> entrySet() {
    return instance.entrySet();
  }

-  /** {@inheritDoc} */
+  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
@@ -93,27 +93,27 @@ public class MapWritable extends AbstractMapWritable
    return false;
  }

-  /** {@inheritDoc} */
+  @Override
  public Writable get(Object key) {
    return instance.get(key);
  }
  
-  /** {@inheritDoc} */
+  @Override
  public int hashCode() {
    return 1 + this.instance.hashCode();
  }

-  /** {@inheritDoc} */
+  @Override
  public boolean isEmpty() {
    return instance.isEmpty();
  }

-  /** {@inheritDoc} */
+  @Override
  public Set<Writable> keySet() {
    return instance.keySet();
  }

-  /** {@inheritDoc} */
+  @Override
  @SuppressWarnings("unchecked")
  public Writable put(Writable key, Writable value) {
    addToMap(key.getClass());
@@ -121,31 +121,30 @@ public class MapWritable extends AbstractMapWritable
    return instance.put(key, value);
  }

-  /** {@inheritDoc} */
+  @Override
  public void putAll(Map<? extends Writable, ? extends Writable> t) {
    for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
      put(e.getKey(), e.getValue());
    }
  }

-  /** {@inheritDoc} */
+  @Override
  public Writable remove(Object key) {
    return instance.remove(key);
  }

-  /** {@inheritDoc} */
+  @Override
  public int size() {
    return instance.size();
  }

-  /** {@inheritDoc} */
+  @Override
  public Collection<Writable> values() {
    return instance.values();
  }
  
  // Writable
  
-  /** {@inheritDoc} */
  @Override
  public void write(DataOutput out) throws IOException {
    super.write(out);
@@ -164,7 +163,6 @@ public class MapWritable extends AbstractMapWritable
    }
  }

-  /** {@inheritDoc} */
  @SuppressWarnings("unchecked")
  @Override
  public void readFields(DataInput in) throws IOException {

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java

@@ -35,6 +35,7 @@ public class NullWritable implements WritableComparable<NullWritable> {
  /** Returns the single instance of this class. */
  public static NullWritable get() { return THIS; }
  
+  @Override
  public String toString() {
    return "(null)";
  }
@@ -46,8 +47,11 @@ public class NullWritable implements WritableComparable<NullWritable> {
  public int compareTo(NullWritable other) {
    return 0;
  }
+  @Override
  public boolean equals(Object other) { return other instanceof NullWritable; }
+  @Override
  public void readFields(DataInput in) throws IOException {}
+  @Override
  public void write(DataOutput out) throws IOException {}

  /** A Comparator &quot;optimized&quot; for NullWritable. */

+ 7 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java

@@ -66,15 +66,18 @@ public class ObjectWritable implements Writable, Configurable {
    this.instance = instance;
  }
  
+  @Override
  public String toString() {
    return "OW[class=" + declaredClass + ",value=" + instance + "]";
  }

  
+  @Override
  public void readFields(DataInput in) throws IOException {
    readObject(in, this, this.conf);
  }
  
+  @Override
  public void write(DataOutput out) throws IOException {
    writeObject(out, instance, declaredClass, conf);
  }
@@ -99,6 +102,7 @@ public class ObjectWritable implements Writable, Configurable {
      super(conf);
      this.declaredClass = declaredClass;
    }
+    @Override
    public void readFields(DataInput in) throws IOException {
      String className = UTF8.readString(in);
      declaredClass = PRIMITIVE_NAMES.get(className);
@@ -110,6 +114,7 @@ public class ObjectWritable implements Writable, Configurable {
        }
      }
    }
+    @Override
    public void write(DataOutput out) throws IOException {
      UTF8.writeString(out, declaredClass.getName());
    }
@@ -375,10 +380,12 @@ public class ObjectWritable implements Writable, Configurable {
    return declaredClass;
  }

+  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
  }

+  @Override
  public Configuration getConf() {
    return this.conf;
  }

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java

@@ -50,6 +50,7 @@ public class OutputBuffer extends FilterOutputStream {
  private static class Buffer extends ByteArrayOutputStream {
    public byte[] getData() { return buf; }
    public int getLength() { return count; }
+    @Override
    public void reset() { count = 0; }

    public void write(InputStream in, int len) throws IOException {

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java

@@ -194,6 +194,7 @@ public class ReadaheadPool {
      this.len = len;
    }
    
+    @Override
    public void run() {
      if (canceled) return;
      // There's a very narrow race here that the file will close right at

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java

@@ -24,7 +24,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;

+ 32 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java

@@ -625,15 +625,18 @@ public class SequenceFile {
      dataSize = length;
    }
    
+    @Override
    public int getSize() {
      return dataSize;
    }
    
+    @Override
    public void writeUncompressedBytes(DataOutputStream outStream)
      throws IOException {
      outStream.write(data, 0, dataSize);
    }

+    @Override
    public void writeCompressedBytes(DataOutputStream outStream) 
      throws IllegalArgumentException, IOException {
      throw 
@@ -666,10 +669,12 @@ public class SequenceFile {
      dataSize = length;
    }
    
+    @Override
    public int getSize() {
      return dataSize;
    }
    
+    @Override
    public void writeUncompressedBytes(DataOutputStream outStream)
      throws IOException {
      if (decompressedStream == null) {
@@ -687,6 +692,7 @@ public class SequenceFile {
      }
    }

+    @Override
    public void writeCompressedBytes(DataOutputStream outStream) 
      throws IllegalArgumentException, IOException {
      outStream.write(data, 0, dataSize);
@@ -728,6 +734,7 @@ public class SequenceFile {
      return new TreeMap<Text, Text>(this.theMetadata);
    }
    
+    @Override
    public void write(DataOutput out) throws IOException {
      out.writeInt(this.theMetadata.size());
      Iterator<Map.Entry<Text, Text>> iter =
@@ -739,6 +746,7 @@ public class SequenceFile {
      }
    }

+    @Override
    public void readFields(DataInput in) throws IOException {
      int sz = in.readInt();
      if (sz < 0) throw new IOException("Invalid size: " + sz + " for file metadata object");
@@ -752,6 +760,7 @@ public class SequenceFile {
      }    
    }

+    @Override
    public boolean equals(Object other) {
      if (other == null) {
        return false;
@@ -788,11 +797,13 @@ public class SequenceFile {
      return true;
    }

+    @Override
    public int hashCode() {
      assert false : "hashCode not designed";
      return 42; // any arbitrary constant will do 
    }
    
+    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder();
      sb.append("size: ").append(this.theMetadata.size()).append("\n");
@@ -1250,6 +1261,7 @@ public class SequenceFile {
    Configuration getConf() { return conf; }
    
    /** Close the file. */
+    @Override
    public synchronized void close() throws IOException {
      keySerializer.close();
      uncompressedValSerializer.close();
@@ -1360,6 +1372,7 @@ public class SequenceFile {
    }

    /** Append a key/value pair. */
+    @Override
    @SuppressWarnings("unchecked")
    public synchronized void append(Object key, Object val)
      throws IOException {
@@ -1392,6 +1405,7 @@ public class SequenceFile {
    }

    /** Append a key/value pair. */
+    @Override
    public synchronized void appendRaw(byte[] keyData, int keyOffset,
        int keyLength, ValueBytes val) throws IOException {

@@ -1449,6 +1463,7 @@ public class SequenceFile {
    }
    
    /** Compress and flush contents to dfs */
+    @Override
    public synchronized void sync() throws IOException {
      if (noBufferedRecords > 0) {
        super.sync();
@@ -1478,6 +1493,7 @@ public class SequenceFile {
    }
    
    /** Close the file. */
+    @Override
    public synchronized void close() throws IOException {
      if (out != null) {
        sync();
@@ -1486,6 +1502,7 @@ public class SequenceFile {
    }

    /** Append a key/value pair. */
+    @Override
    @SuppressWarnings("unchecked")
    public synchronized void append(Object key, Object val)
      throws IOException {
@@ -1518,6 +1535,7 @@ public class SequenceFile {
    }
    
    /** Append a key/value pair. */
+    @Override
    public synchronized void appendRaw(byte[] keyData, int keyOffset,
        int keyLength, ValueBytes val) throws IOException {
      
@@ -1960,6 +1978,7 @@ public class SequenceFile {
    }
    
    /** Close the file. */
+    @Override
    public synchronized void close() throws IOException {
      // Return the decompressors to the pool
      CodecPool.returnDecompressor(keyLenDecompressor);
@@ -2618,6 +2637,7 @@ public class SequenceFile {
    }

    /** Returns the name of the file. */
+    @Override
    public String toString() {
      return filename;
    }
@@ -2948,6 +2968,7 @@ public class SequenceFile {
         mergeSort.mergeSort(pointersCopy, pointers, 0, count);
         mergeSort.mergeSort(pointersCopy, pointers, 0, count);
       }
       }
       class SeqFileComparator implements Comparator<IntWritable> {
       class SeqFileComparator implements Comparator<IntWritable> {
+        @Override
         public int compare(IntWritable I, IntWritable J) {
         public int compare(IntWritable I, IntWritable J) {
           return comparator.compare(rawBuffer, keyOffsets[I.get()], 
           return comparator.compare(rawBuffer, keyOffsets[I.get()], 
                                     keyLengths[I.get()], rawBuffer, 
                                     keyLengths[I.get()], rawBuffer, 
@@ -3221,6 +3242,7 @@ public class SequenceFile {
         this.tmpDir = tmpDir;
         this.tmpDir = tmpDir;
         this.progress = progress;
         this.progress = progress;
       }
       }
+      @Override
       protected boolean lessThan(Object a, Object b) {
       protected boolean lessThan(Object a, Object b) {
         // indicate we're making progress
         // indicate we're making progress
         if (progress != null) {
         if (progress != null) {
@@ -3232,6 +3254,7 @@ public class SequenceFile {
                                   msa.getKey().getLength(), msb.getKey().getData(), 0, 
                                   msa.getKey().getLength(), msb.getKey().getData(), 0, 
                                   msb.getKey().getLength()) < 0;
                                   msb.getKey().getLength()) < 0;
       }
       }
+      @Override
       public void close() throws IOException {
       public void close() throws IOException {
         SegmentDescriptor ms;                           // close inputs
         SegmentDescriptor ms;                           // close inputs
         while ((ms = (SegmentDescriptor)pop()) != null) {
         while ((ms = (SegmentDescriptor)pop()) != null) {
@@ -3239,12 +3262,15 @@ public class SequenceFile {
         }
         }
         minSegment = null;
         minSegment = null;
       }
       }
+      @Override
       public DataOutputBuffer getKey() throws IOException {
       public DataOutputBuffer getKey() throws IOException {
         return rawKey;
         return rawKey;
       }
       }
+      @Override
       public ValueBytes getValue() throws IOException {
       public ValueBytes getValue() throws IOException {
         return rawValue;
         return rawValue;
       }
       }
+      @Override
       public boolean next() throws IOException {
       public boolean next() throws IOException {
         if (size() == 0)
         if (size() == 0)
           return false;
           return false;
@@ -3272,6 +3298,7 @@ public class SequenceFile {
         return true;
         return true;
       }
       }
       
       
+      @Override
       public Progress getProgress() {
       public Progress getProgress() {
         return mergeProgress; 
         return mergeProgress; 
       }
       }
@@ -3469,6 +3496,7 @@ public class SequenceFile {
         return preserveInput;
         return preserveInput;
       }
       }
       
       
+      @Override
       public int compareTo(Object o) {
       public int compareTo(Object o) {
         SegmentDescriptor that = (SegmentDescriptor)o;
         SegmentDescriptor that = (SegmentDescriptor)o;
         if (this.segmentLength != that.segmentLength) {
         if (this.segmentLength != that.segmentLength) {
@@ -3481,6 +3509,7 @@ public class SequenceFile {
           compareTo(that.segmentPathName.toString());
       }
 
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof SegmentDescriptor)) {
           return false;
@@ -3495,6 +3524,7 @@ public class SequenceFile {
         return false;
       }
 
+      @Override
       public int hashCode() {
         return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
       }
@@ -3584,12 +3614,14 @@ public class SequenceFile {
       /** The default cleanup. Subclasses can override this with a custom 
        * cleanup 
        */
+      @Override
       public void cleanup() throws IOException {
         super.close();
         if (super.shouldPreserveInput()) return;
         parentContainer.cleanup();
       }
       
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof LinkedSegmentsDescriptor)) {
           return false;

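The SegmentDescriptor hunks above add @Override to compareTo, equals, and hashCode. A minimal standalone sketch of the same ordering contract, with illustrative names rather than the Hadoop originals (segments compare by length first, then by path name, and hashCode folds a long field the same way):

    class Segment implements Comparable<Segment> {
      private final long length;      // bytes in this sorted run
      private final String pathName;  // tie-breaker for equal lengths

      Segment(long length, String pathName) {
        this.length = length;
        this.pathName = pathName;
      }

      @Override
      public int compareTo(Segment that) {
        // Shorter segments order first, mirroring the compareTo hunk above.
        if (this.length != that.length) {
          return this.length < that.length ? -1 : 1;
        }
        return this.pathName.compareTo(that.pathName);
      }

      @Override
      public boolean equals(Object o) {
        if (!(o instanceof Segment)) {
          return false;
        }
        Segment that = (Segment) o;
        return length == that.length && pathName.equals(that.pathName);
      }

      @Override
      public int hashCode() {
        // Same long-to-int folding idiom as the hashCode hunk above.
        return 37 * 17 + (int) (length ^ (length >>> 32));
      }
    }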
+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java

@@ -87,6 +87,7 @@ public class SetFile extends MapFile {
     }
 
     // javadoc inherited
+    @Override
     public boolean seek(WritableComparable key)
       throws IOException {
       return super.seek(key);

+ 18 - 20
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java

@@ -57,86 +57,86 @@ public class SortedMapWritable extends AbstractMapWritable
     copy(other);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Comparator<? super WritableComparable> comparator() {
     // Returning null means we use the natural ordering of the keys
     return null;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public WritableComparable firstKey() {
     return instance.firstKey();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   headMap(WritableComparable toKey) {
     
     return instance.headMap(toKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public WritableComparable lastKey() {
     return instance.lastKey();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   subMap(WritableComparable fromKey, WritableComparable toKey) {
     
     return instance.subMap(fromKey, toKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   tailMap(WritableComparable fromKey) {
     
     return instance.tailMap(fromKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void clear() {
     instance.clear();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsKey(Object key) {
     return instance.containsKey(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsValue(Object value) {
     return instance.containsValue(value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<java.util.Map.Entry<WritableComparable, Writable>> entrySet() {
     return instance.entrySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable get(Object key) {
     return instance.get(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean isEmpty() {
     return instance.isEmpty();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<WritableComparable> keySet() {
     return instance.keySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable put(WritableComparable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
     return instance.put(key, value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void putAll(Map<? extends WritableComparable, ? extends Writable> t) {
     for (Map.Entry<? extends WritableComparable, ? extends Writable> e:
       t.entrySet()) {
@@ -145,22 +145,21 @@ public class SortedMapWritable extends AbstractMapWritable
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable remove(Object key) {
     return instance.remove(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int size() {
     return instance.size();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Collection<Writable> values() {
     return instance.values();
   }
 
-  /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
@@ -187,7 +186,6 @@ public class SortedMapWritable extends AbstractMapWritable
     }
   }
 
-  /** {@inheritDoc} */
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);

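Every change in SortedMapWritable above is the same mechanical swap: a /** {@inheritDoc} */ comment replaced by @Override. Javadoc inherits documentation for overriding methods automatically; the annotation adds the compile-time check the comment never had. A hedged before/after sketch using a toy delegate (not the Hadoop class):

    import java.util.Comparator;
    import java.util.SortedMap;
    import java.util.TreeMap;

    // Forwarding methods in the style of SortedMapWritable.
    abstract class DelegatingSortedMap implements SortedMap<String, String> {
      private final TreeMap<String, String> instance = new TreeMap<String, String>();

      // Previously marked only with /** {@inheritDoc} */; @Override now makes
      // the compiler verify this really overrides SortedMap.comparator().
      @Override
      public Comparator<? super String> comparator() {
        return instance.comparator();
      }

      @Override
      public String firstKey() {
        return instance.firstKey();
      }
    }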
+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java

@@ -54,6 +54,7 @@ public interface Stringifier<T> extends java.io.Closeable {
    * Closes this object. 
    * @throws IOException if an I/O error occurs 
    * */
+  @Override
   public void close() throws IOException;
   
 }

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java

@@ -55,6 +55,7 @@ public class Text extends BinaryComparable
   
   private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
     new ThreadLocal<CharsetEncoder>() {
+      @Override
       protected CharsetEncoder initialValue() {
         return Charset.forName("UTF-8").newEncoder().
                onMalformedInput(CodingErrorAction.REPORT).
@@ -64,6 +65,7 @@ public class Text extends BinaryComparable
   
   private static ThreadLocal<CharsetDecoder> DECODER_FACTORY =
     new ThreadLocal<CharsetDecoder>() {
+    @Override
     protected CharsetDecoder initialValue() {
       return Charset.forName("UTF-8").newDecoder().
              onMalformedInput(CodingErrorAction.REPORT).
@@ -112,11 +114,13 @@ public class Text extends BinaryComparable
    * valid. Please use {@link #copyBytes()} if you
    * need the returned array to be precisely the length of the data.
    */
+  @Override
   public byte[] getBytes() {
     return bytes;
   }
 
   /** Returns the number of bytes in the byte array */ 
+  @Override
   public int getLength() {
     return length;
   }
@@ -281,6 +285,7 @@ public class Text extends BinaryComparable
   
   /** deserialize 
    */
+  @Override
   public void readFields(DataInput in) throws IOException {
     int newLength = WritableUtils.readVInt(in);
     setCapacity(newLength, false);
@@ -313,6 +318,7 @@ public class Text extends BinaryComparable
    * length uses zero-compressed encoding
    * @see Writable#write(DataOutput)
    */
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVInt(out, length);
     out.write(bytes, 0, length);
@@ -329,6 +335,7 @@ public class Text extends BinaryComparable
   }
 
   /** Returns true iff <code>o</code> is a Text with the same contents.  */
+  @Override
   public boolean equals(Object o) {
     if (o instanceof Text)
       return super.equals(o);
@@ -346,6 +353,7 @@ public class Text extends BinaryComparable
       super(Text.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       int n1 = WritableUtils.decodeVIntSize(b1[s1]);

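The first two Text.java hunks reach into the anonymous ThreadLocal subclasses that cache one UTF-8 encoder/decoder per thread (CharsetEncoder is stateful and not thread-safe). A self-contained sketch of that pattern, assuming nothing beyond the JDK (Utf8Codecs is an illustrative name):

    import java.nio.charset.Charset;
    import java.nio.charset.CharsetEncoder;
    import java.nio.charset.CodingErrorAction;

    class Utf8Codecs {
      // One encoder per thread; initialValue() runs on first access per thread.
      static final ThreadLocal<CharsetEncoder> ENCODER =
          new ThreadLocal<CharsetEncoder>() {
            @Override  // the check the diff adds: a typo'd name fails to compile
            protected CharsetEncoder initialValue() {
              return Charset.forName("UTF-8").newEncoder()
                  .onMalformedInput(CodingErrorAction.REPORT)
                  .onUnmappableCharacter(CodingErrorAction.REPORT);
            }
          };
    }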
+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java

@@ -57,6 +57,7 @@ public class TwoDArrayWritable implements Writable {
 
   public Writable[][] get() { return values; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     // construct matrix
     values = new Writable[in.readInt()][];          
@@ -81,6 +82,7 @@ public class TwoDArrayWritable implements Writable {
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(values.length);                 // write values
     for (int i = 0; i < values.length; i++) {

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java

@@ -110,6 +110,7 @@ public class UTF8 implements WritableComparable<UTF8> {
     System.arraycopy(other.bytes, 0, bytes, 0, length);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     length = in.readUnsignedShort();
     if (bytes == null || bytes.length < length)
@@ -123,6 +124,7 @@ public class UTF8 implements WritableComparable<UTF8> {
     WritableUtils.skipFully(in, length);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeShort(length);
     out.write(bytes, 0, length);

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java

@@ -43,10 +43,12 @@ public class VIntWritable implements WritableComparable<VIntWritable> {
   /** Return the value of this VIntWritable. */
   public int get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = WritableUtils.readVInt(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVInt(out, value);
   }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java

@@ -43,10 +43,12 @@ public class VLongWritable implements WritableComparable<VLongWritable> {
   /** Return the value of this LongWritable. */
   public long get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = WritableUtils.readVLong(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVLong(out, value);
   }

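The VIntWritable and VLongWritable hunks annotate the two Writable methods shown above. A hedged round-trip sketch of that pair; write, readFields, and get appear in the diff, while the no-arg and one-arg constructors are assumptions from the released class:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.VIntWritable;

    public class VIntRoundTrip {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        new VIntWritable(42).write(new DataOutputStream(buf));  // zero-compressed encoding

        VIntWritable copy = new VIntWritable();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(copy.get());  // prints 42
      }
    }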
Some files were not shown because too many files changed in this diff