Ver código fonte

HADOOP-18229. Fix Hadoop-Common JavaDoc Errors (#4292)


Contributed by slfan1989
slfan1989 3 anos atrás
pai
commit
f6fa5bd1aa
100 arquivos alterados com 1611 adições e 392 exclusões
  1. 10 0
      hadoop-common-project/hadoop-common/pom.xml
  2. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
  3. 34 18
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  4. 3 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
  5. 6 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
  6. 3 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
  7. 10 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
  8. 3 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
  9. 7 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
  10. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
  11. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
  12. 29 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
  13. 11 12
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
  14. 28 20
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
  15. 6 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
  16. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
  17. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
  18. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
  19. 252 25
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
  20. 11 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
  21. 3 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
  22. 45 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
  23. 6 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
  24. 9 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
  25. 32 8
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
  26. 30 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
  27. 4 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
  28. 7 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
  29. 22 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
  30. 2 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
  31. 8 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
  32. 21 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
  33. 44 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
  34. 32 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
  35. 9 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
  36. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
  37. 8 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
  38. 21 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
  39. 73 39
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
  40. 3 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
  41. 14 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
  42. 139 31
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
  43. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
  44. 70 17
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
  45. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
  46. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
  47. 19 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
  48. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
  49. 2 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
  50. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
  51. 6 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
  52. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
  53. 17 14
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
  54. 5 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
  55. 7 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
  56. 11 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
  57. 7 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
  58. 2 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
  59. 17 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
  60. 6 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
  61. 52 12
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
  62. 6 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
  63. 12 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
  64. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
  65. 5 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
  66. 36 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
  67. 14 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
  68. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
  69. 2 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
  70. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
  71. 5 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
  72. 2 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
  73. 3 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
  74. 3 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
  75. 16 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
  76. 8 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
  77. 47 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
  78. 34 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
  79. 18 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
  80. 3 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
  81. 2 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
  82. 16 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
  83. 12 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
  84. 1 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
  85. 17 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
  86. 7 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
  87. 1 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
  88. 1 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
  89. 4 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
  90. 4 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
  91. 2 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
  92. 35 21
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
  93. 9 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
  94. 33 20
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
  95. 1 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
  96. 10 8
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
  97. 9 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
  98. 3 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
  99. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
  100. 28 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java

+ 10 - 0
hadoop-common-project/hadoop-common/pom.xml

@@ -1171,6 +1171,16 @@
               </execution>
              </executions>
           </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-javadoc-plugin</artifactId>
+            <configuration>
+              <sourceFileExcludes>
+                <sourceFileExclude>**/FSProtos.java</sourceFileExclude>
+              </sourceFileExcludes>
+              <excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
+            </configuration>
+          </plugin>
         </plugins>
       </build>
     </profile>

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java

@@ -57,8 +57,8 @@ public class ConfigRedactor {
    * Given a key / value pair, decides whether or not to redact and returns
    * either the original value or text indicating it has been redacted.
    *
-   * @param key
-   * @param value
+   * @param key param key.
+   * @param value param value, will return if conditions permit.
    * @return Original value, or text indicating it has been redacted
    */
   public String redact(String key, String value) {

+ 34 - 18
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -317,7 +317,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private boolean loadDefaults = true;
 
   /**
-   * Configuration objects
+   * Configuration objects.
    */
   private static final WeakHashMap<Configuration,Object> REGISTRY = 
     new WeakHashMap<Configuration,Object>();
@@ -1908,6 +1908,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name Property name
    * @param vStr The string value with time unit suffix to be converted.
    * @param unit Unit to convert the stored property, if it exists.
+   * @return time duration in given time unit.
    */
   public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
     return getTimeDurationHelper(name, vStr, unit, unit);
@@ -1922,6 +1923,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param vStr The string value with time unit suffix to be converted.
    * @param defaultUnit Unit to convert the stored property, if it exists.
    * @param returnUnit Unit for the returned value.
+   * @return time duration in given time unit.
    */
   private long getTimeDurationHelper(String name, String vStr,
       TimeUnit defaultUnit, TimeUnit returnUnit) {
@@ -2206,7 +2208,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
 
     /**
-     * Is the given value in the set of ranges
+     * Is the given value in the set of ranges.
      * @param value the value to check
      * @return is the value in the ranges?
      */
@@ -2263,7 +2265,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
 
   /**
-   * Parse the given attribute as a set of integer ranges
+   * Parse the given attribute as a set of integer ranges.
    * @param name the attribute name
    * @param defaultValue the default value if it is not set
    * @return a new set of ranges from the configured value
@@ -2482,7 +2484,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Fallback to clear text passwords in configuration.
-   * @param name
+   * @param name the property name.
    * @return clear text password or null
    */
   protected char[] getPasswordFromConfig(String name) {
@@ -2547,6 +2549,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Set the socket address for the <code>name</code> property as
    * a <code>host:port</code>.
+   * @param name property name.
+   * @param addr inetSocketAddress addr.
    */
   public void setSocketAddr(String name, InetSocketAddress addr) {
     set(name, NetUtils.getHostPortString(addr));
@@ -2724,6 +2728,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name the conf key name.
    * @param defaultValue default value.
    * @param xface the interface implemented by the named class.
+   * @param <U> Interface class type.
    * @return property value as a <code>Class</code>, 
    *         or <code>defaultValue</code>.
    */
@@ -2753,6 +2758,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name the property name.
    * @param xface the interface implemented by the classes named by
    *        <code>name</code>.
+   * @param <U> Interface class type.
    * @return a <code>List</code> of objects implementing <code>xface</code>.
    */
   @SuppressWarnings("unchecked")
@@ -2785,15 +2791,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     set(name, theClass.getName());
   }
 
-  /** 
+  /**
    * Get a local file under a directory named by <i>dirsProp</i> with
    * the given <i>path</i>.  If <i>dirsProp</i> contains multiple directories,
    * then one is chosen based on <i>path</i>'s hash code.  If the selected
    * directory does not exist, an attempt is made to create it.
-   * 
+   *
    * @param dirsProp directory in which to locate the file.
    * @param path file-path.
    * @return local file under the directory with the given path.
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPath(String dirsProp, String path)
     throws IOException {
@@ -2817,15 +2824,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     throw new IOException("No valid local directories in property: "+dirsProp);
   }
 
-  /** 
+  /**
    * Get a local file name under a directory named in <i>dirsProp</i> with
    * the given <i>path</i>.  If <i>dirsProp</i> contains multiple directories,
    * then one is chosen based on <i>path</i>'s hash code.  If the selected
    * directory does not exist, an attempt is made to create it.
-   * 
+   *
    * @param dirsProp directory in which to locate the file.
    * @param path file-path.
    * @return local file under the directory with the given path.
+   * @throws IOException raised on errors performing I/O.
    */
   public File getFile(String dirsProp, String path)
     throws IOException {
@@ -3437,7 +3445,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM.
-   * @param prop
+   * @param prop properties.
    */
   public void addTags(Properties prop) {
     // Get all system tags
@@ -3538,7 +3546,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Print a warning if a property with a given name already exists with a
-   * different value
+   * different value.
    */
   private void checkForOverride(Properties properties, String name, String attr, String value) {
     String propertyValue = properties.getProperty(attr);
@@ -3548,11 +3556,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
   }
 
-  /** 
+  /**
    * Write out the non-default properties in this configuration to the given
    * {@link OutputStream} using UTF-8 encoding.
-   * 
+   *
    * @param out the output stream to write to.
+   * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(OutputStream out) throws IOException {
     writeXml(new OutputStreamWriter(out, "UTF-8"));
@@ -3582,7 +3591,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * the configuration, this method throws an {@link IllegalArgumentException}.
    * </li>
    * </ul>
+   * @param propertyName xml property name.
    * @param out the writer to write to.
+   * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(@Nullable String propertyName, Writer out)
       throws IOException, IllegalArgumentException {
@@ -3736,7 +3747,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param config the configuration
    * @param propertyName property name
    * @param out the Writer to write to
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @throws IllegalArgumentException when property name is not
    *   empty and the property is not found in configuration
    **/
@@ -3783,7 +3794,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    *
    * @param config the configuration
    * @param out the Writer to write to
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
@@ -3812,7 +3823,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param jsonGen json writer
    * @param config configuration
    * @param name property name
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   private static void appendJSONProperty(JsonGenerator jsonGen,
       Configuration config, String name, ConfigRedactor redactor)
@@ -3894,7 +3905,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     return this.quietmode;
   }
   
-  /** For debugging.  List non-default properties to the terminal and exit. */
+  /** For debugging.  List non-default properties to the terminal and exit.
+   * @param args the argument to be parsed.
+   * @throws Exception exception.
+   */
   public static void main(String[] args) throws Exception {
     new Configuration().writeXml(System.out);
   }
@@ -3928,8 +3942,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
   
   /**
-   * get keys matching the the regex 
-   * @param regex
+   * get keys matching the regex.
+   * @param regex the regex to match against.
    * @return {@literal Map<String,String>} with matching keys
    */
   public Map<String,String> getValByRegex(String regex) {
@@ -3974,6 +3988,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Returns whether or not a deprecated name has been warned. If the name is not
    * deprecated then always return false
+   * @param name property name.
+   * @return true if name is a warned deprecation.
    */
   public static boolean hasWarnedDeprecation(String name) {
     DeprecationContext deprecations = deprecationContext.get();

+ 3 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java

@@ -33,7 +33,9 @@ public class Configured implements Configurable {
     this(null);
   }
   
-  /** Construct a Configured. */
+  /** Construct a Configured.
+   * @param conf the Configuration object.
+   */
   public Configured(Configuration conf) {
     setConf(conf);
   }

+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java

@@ -33,6 +33,9 @@ public interface Reconfigurable extends Configurable {
    * (or null if it was not previously set). If newVal is null, set the property
    * to its default value;
    *
+   * @param property property name.
+   * @param newVal new value.
+   * @throws ReconfigurationException if there was an error applying newVal.
    * If the property cannot be changed, throw a 
    * {@link ReconfigurationException}.
    */
@@ -45,11 +48,14 @@ public interface Reconfigurable extends Configurable {
    * If isPropertyReconfigurable returns true for a property,
    * then changeConf should not throw an exception when changing
    * this property.
+   * @param property property name.
+   * @return true if property reconfigurable; false if not.
    */
   boolean isPropertyReconfigurable(String property);
 
   /**
    * Return all the properties that can be changed at run time.
+   * @return reconfigurable properties.
    */
   Collection<String> getReconfigurableProperties();
 }

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java

@@ -79,6 +79,7 @@ public abstract class ReconfigurableBase
   /**
    * Construct a ReconfigurableBase with the {@link Configuration}
    * conf.
+   * @param conf configuration.
    */
   public ReconfigurableBase(Configuration conf) {
     super((conf == null) ? new Configuration() : conf);
@@ -91,6 +92,7 @@ public abstract class ReconfigurableBase
 
   /**
    * Create a new configuration.
+   * @return configuration.
    */
   protected abstract Configuration getNewConf();
 
@@ -162,6 +164,7 @@ public abstract class ReconfigurableBase
 
   /**
    * Start a reconfiguration task to reload configuration in background.
+   * @throws IOException raised on errors performing I/O.
    */
   public void startReconfigurationTask() throws IOException {
     synchronized (reconfigLock) {

+ 10 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java

@@ -59,6 +59,10 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Create a new instance of {@link ReconfigurationException}.
+   * @param property property name.
+   * @param newVal new value.
+   * @param oldVal old value.
+   * @param cause original exception.
    */
   public ReconfigurationException(String property, 
                                   String newVal, String oldVal,
@@ -71,6 +75,9 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Create a new instance of {@link ReconfigurationException}.
+   * @param property property name.
+   * @param newVal new value.
+   * @param oldVal old value.
    */
   public ReconfigurationException(String property, 
                                   String newVal, String oldVal) {
@@ -82,6 +89,7 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get property that cannot be changed.
+   * @return property info.
    */
   public String getProperty() {
     return property;
@@ -89,6 +97,7 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get value to which property was supposed to be changed.
+   * @return new value.
    */
   public String getNewValue() {
     return newVal;
@@ -96,6 +105,7 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get old value of property that cannot be changed.
+   * @return old value.
    */
   public String getOldValue() {
     return oldVal;

+ 3 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java

@@ -42,7 +42,8 @@ public class ReconfigurationTaskStatus {
   /**
    * Return true if
    *   - A reconfiguration task has finished or
-   *   - an active reconfiguration task is running
+   *   - an active reconfiguration task is running.
+   * @return true if startTime &gt; 0; false if not.
    */
   public boolean hasTask() {
     return startTime > 0;
@@ -51,6 +52,7 @@ public class ReconfigurationTaskStatus {
   /**
    * Return true if the latest reconfiguration task has finished and there is
    * no another active task running.
+   * @return true if endTime &gt; 0; false if not.
    */
   public boolean stopped() {
     return endTime > 0;

+ 7 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java

@@ -145,14 +145,18 @@ public abstract class CryptoCodec implements Configurable, Closeable {
   public abstract CipherSuite getCipherSuite();
 
   /**
-   * Create a {@link org.apache.hadoop.crypto.Encryptor}. 
-   * @return Encryptor the encryptor
+   * Create a {@link org.apache.hadoop.crypto.Encryptor}.
+   *
+   * @return Encryptor the encryptor.
+   * @throws GeneralSecurityException thrown if create encryptor error.
    */
   public abstract Encryptor createEncryptor() throws GeneralSecurityException;
-  
+
   /**
    * Create a {@link org.apache.hadoop.crypto.Decryptor}.
+   *
    * @return Decryptor the decryptor
+   * @throws GeneralSecurityException thrown if create decryptor error.
    */
   public abstract Decryptor createDecryptor() throws GeneralSecurityException;
   

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java

@@ -157,7 +157,7 @@ public class CryptoInputStream extends FilterInputStream implements
    * @param off the buffer offset.
    * @param len the maximum number of decrypted data bytes to read.
    * @return int the total number of decrypted data bytes read into the buffer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public int read(byte[] b, int off, int len) throws IOException {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

@@ -146,7 +146,7 @@ public class CryptoOutputStream extends FilterOutputStream implements
    * @param b the data.
    * @param off the start offset in the data.
    * @param len the number of bytes to write.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public synchronized void write(byte[] b, int off, int len) throws IOException {

+ 29 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java

@@ -39,7 +39,11 @@ public class CryptoStreamUtils {
   private static final Logger LOG =
       LoggerFactory.getLogger(CryptoStreamUtils.class);
 
-  /** Forcibly free the direct buffer. */
+  /**
+   * Forcibly free the direct buffer.
+   *
+   * @param buffer buffer.
+   */
   public static void freeDB(ByteBuffer buffer) {
     if (CleanerUtil.UNMAP_SUPPORTED) {
       try {
@@ -52,13 +56,22 @@ public class CryptoStreamUtils {
     }
   }
 
-  /** Read crypto buffer size */
+  /**
+   * Read crypto buffer size.
+   *
+   * @param conf configuration.
+   * @return hadoop.security.crypto.buffer.size.
+   */
   public static int getBufferSize(Configuration conf) {
     return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY, 
         HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT);
   }
-  
-  /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. */
+
+  /**
+   * AES/CTR/NoPadding or SM4/CTR/NoPadding is required.
+   *
+   * @param codec crypto codec.
+   */
   public static void checkCodec(CryptoCodec codec) {
     if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING &&
             codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) {
@@ -67,17 +80,27 @@ public class CryptoStreamUtils {
     }
   }
 
-  /** Check and floor buffer size */
+  /**
+   * Check and floor buffer size.
+   *
+   * @param codec crypto codec.
+   * @param bufferSize the size of the buffer to be used.
+   * @return calc buffer size.
+   */
   public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
     Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE, 
         "Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
     return bufferSize - bufferSize % codec.getCipherSuite()
         .getAlgorithmBlockSize();
   }
-  
+
   /**
    * If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's
    * current position, otherwise return 0;
+   *
+   * @param in wrapper.
+   * @return current position, otherwise return 0.
+   * @throws IOException raised on errors performing I/O.
    */
   public static long getInputStreamOffset(InputStream in) throws IOException {
     if (in instanceof Seekable) {

+ 11 - 12
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java

@@ -225,34 +225,33 @@ public final class OpensslCipher {
     output.position(output.position() + len);
     return len;
   }
-  
+
   /**
    * Finishes a multiple-part operation. The data is encrypted or decrypted,
    * depending on how this cipher was initialized.
    * <p>
-   * 
    * The result is stored in the output buffer. Upon return, the output buffer's
    * position will have advanced by n, where n is the value returned by this
    * method; the output buffer's limit will not have changed.
-   * <p>
-   * 
+   * </p>
    * If <code>output.remaining()</code> bytes are insufficient to hold the result,
    * a <code>ShortBufferException</code> is thrown.
    * <p>
-   * 
    * Upon finishing, this method resets this cipher object to the state it was
    * in when previously initialized. That is, the object is available to encrypt
    * or decrypt more data.
-   * <p>
-   * 
-   * If any exception is thrown, this cipher object need to be reset before it 
+   * </p>
+   * If any exception is thrown, this cipher object need to be reset before it
    * can be used again.
-   * 
+   *
    * @param output the output ByteBuffer
    * @return int number of bytes stored in <code>output</code>
-   * @throws ShortBufferException
-   * @throws IllegalBlockSizeException
-   * @throws BadPaddingException
+   * @throws ShortBufferException      if there is insufficient space in the output buffer.
+   * @throws IllegalBlockSizeException This exception is thrown when the length
+   *                                   of data provided to a block cipher is incorrect.
+   * @throws BadPaddingException       This exception is thrown when a particular
+   *                                   padding mechanism is expected for the input
+   *                                   data but the data is not padded properly.
    */
   public int doFinal(ByteBuffer output) throws ShortBufferException, 
       IllegalBlockSizeException, BadPaddingException {

+ 28 - 20
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java

@@ -242,7 +242,7 @@ public abstract class KeyProvider implements Closeable {
     /**
      * Serialize the metadata to a set of bytes.
      * @return the serialized bytes
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     protected byte[] serialize() throws IOException {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
@@ -281,7 +281,7 @@ public abstract class KeyProvider implements Closeable {
     /**
      * Deserialize a new metadata object from a set of bytes.
      * @param bytes the serialized metadata
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     protected Metadata(byte[] bytes) throws IOException {
       String cipher = null;
@@ -450,7 +450,7 @@ public abstract class KeyProvider implements Closeable {
    * when decrypting data.
    * @param versionName the name of a specific version of the key
    * @return the key material
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract KeyVersion getKeyVersion(String versionName
                                             ) throws IOException;
@@ -458,14 +458,15 @@ public abstract class KeyProvider implements Closeable {
   /**
    * Get the key names for all keys.
    * @return the list of key names
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract List<String> getKeys() throws IOException;
 
   /**
    * Get key metadata in bulk.
    * @param names the names of the keys to get
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
+   * @return Metadata Array.
    */
   public Metadata[] getKeysMetadata(String... names) throws IOException {
     Metadata[] result = new Metadata[names.length];
@@ -477,8 +478,10 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Get the key material for all versions of a specific key name.
+   *
+   * @param name the base name of the key.
    * @return the list of key material
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
 
@@ -488,7 +491,7 @@ public abstract class KeyProvider implements Closeable {
    * @param name the base name of the key
    * @return the version name of the current version of the key or null if the
    *    key version doesn't exist
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public KeyVersion getCurrentKey(String name) throws IOException {
     Metadata meta = getMetadata(name);
@@ -502,7 +505,7 @@ public abstract class KeyProvider implements Closeable {
    * Get metadata about the key.
    * @param name the basename of the key
    * @return the key's metadata or null if the key doesn't exist
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract Metadata getMetadata(String name) throws IOException;
 
@@ -512,7 +515,7 @@ public abstract class KeyProvider implements Closeable {
    * @param material the key material for the first version of the key.
    * @param options the options for the new key.
    * @return the version name of the first version of the key.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract KeyVersion createKey(String name, byte[] material,
                                        Options options) throws IOException;
@@ -537,7 +540,7 @@ public abstract class KeyProvider implements Closeable {
    * @param size length of the key.
    * @param algorithm algorithm to use for generating the key.
    * @return the generated key.
-   * @throws NoSuchAlgorithmException
+   * @throws NoSuchAlgorithmException no such algorithm exception.
    */
   protected byte[] generateKey(int size, String algorithm)
       throws NoSuchAlgorithmException {
@@ -558,8 +561,8 @@ public abstract class KeyProvider implements Closeable {
    * @param name the base name of the key
    * @param options the options for the new key.
    * @return the version name of the first version of the key.
-   * @throws IOException
-   * @throws NoSuchAlgorithmException
+   * @throws IOException raised on errors performing I/O.
+   * @throws NoSuchAlgorithmException no such algorithm exception.
    */
   public KeyVersion createKey(String name, Options options)
       throws NoSuchAlgorithmException, IOException {
@@ -570,7 +573,7 @@ public abstract class KeyProvider implements Closeable {
   /**
    * Delete the given key.
    * @param name the name of the key to delete
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void deleteKey(String name) throws IOException;
 
@@ -579,7 +582,7 @@ public abstract class KeyProvider implements Closeable {
    * @param name the basename of the key
    * @param material the new key material
    * @return the name of the new version of the key
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract KeyVersion rollNewVersion(String name,
                                              byte[] material
@@ -601,7 +604,10 @@ public abstract class KeyProvider implements Closeable {
    *
    * @param name the basename of the key
    * @return the name of the new version of the key
-   * @throws IOException
+   * @throws IOException              raised on errors performing I/O.
+   * @throws NoSuchAlgorithmException This exception is thrown when a particular
+   *                                  cryptographic algorithm is requested
+   *                                  but is not available in the environment.
    */
   public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
                                                        IOException {
@@ -620,7 +626,7 @@ public abstract class KeyProvider implements Closeable {
    * version of the given key.
    *
    * @param name the basename of the key
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void invalidateCache(String name) throws IOException {
     // NOP
@@ -628,7 +634,7 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Ensures that any changes to the keys are written to persistent store.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void flush() throws IOException;
 
@@ -637,7 +643,7 @@ public abstract class KeyProvider implements Closeable {
    * "/aaa/bbb".
    * @param versionName the version name to split
    * @return the base name of the key
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static String getBaseName(String versionName) throws IOException {
     int div = versionName.lastIndexOf('@');
@@ -660,9 +666,11 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Find the provider with the given key.
+   *
    * @param providerList the list of providers
-   * @param keyName the key name we are looking for
+   * @param keyName the key name we are looking for.
    * @return the KeyProvider that has the key
+   * @throws IOException raised on errors performing I/O.
    */
   public static KeyProvider findProvider(List<KeyProvider> providerList,
                                          String keyName) throws IOException {
@@ -680,7 +688,7 @@ public abstract class KeyProvider implements Closeable {
    * means. If true, the password should be provided by the caller using
    * setPassword().
    * @return Whether or not the provider requires a password
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public boolean needsPassword() throws IOException {
     return false;

+ 6 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java

@@ -178,6 +178,7 @@ public class KeyProviderCryptoExtension extends
      * Calls to this method allows the underlying KeyProvider to warm-up any
      * implementation specific caches used to store the Encrypted Keys.
      * @param keyNames Array of Key Names
+     * @throws IOException thrown if the key material could not be encrypted.
      */
     public void warmUpEncryptedKeys(String... keyNames)
         throws IOException;
@@ -474,8 +475,9 @@ public class KeyProviderCryptoExtension extends
   /**
    * This constructor is to be used by sub classes that provide
    * delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
-   * @param keyProvider
-   * @param extension
+   *
+   * @param keyProvider key provider.
+   * @param extension crypto extension.
    */
   protected KeyProviderCryptoExtension(KeyProvider keyProvider,
       CryptoExtension extension) {
@@ -486,6 +488,7 @@ public class KeyProviderCryptoExtension extends
    * Notifies the Underlying CryptoExtension implementation to warm up any
    * implementation specific caches for the specified KeyVersions
    * @param keyNames Arrays of key Names
+   * @throws IOException raised on errors performing I/O.
    */
   public void warmUpEncryptedKeys(String... keyNames)
       throws IOException {
@@ -557,7 +560,7 @@ public class KeyProviderCryptoExtension extends
    * Calls {@link CryptoExtension#drain(String)} for the given key name on the
    * underlying {@link CryptoExtension}.
    *
-   * @param keyName
+   * @param keyName key name.
    */
   public void drain(String keyName) {
     getExtension().drain(keyName);

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java

@@ -48,14 +48,14 @@ public class KeyProviderDelegationTokenExtension extends
      * Renews the given token.
      * @param token The token to be renewed.
      * @return The token's lifetime after renewal, or 0 if it can't be renewed.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     long renewDelegationToken(final Token<?> token) throws IOException;
 
     /**
      * Cancels the given token.
      * @param token The token to be cancelled.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     Void cancelDelegationToken(final Token<?> token) throws IOException;
 

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java

@@ -75,7 +75,7 @@ public class KeyShell extends CommandShell {
    * </pre>
    * @param args Command line arguments.
    * @return 0 on success, 1 on failure.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   protected int init(String[] args) throws IOException {
@@ -547,7 +547,7 @@ public class KeyShell extends CommandShell {
    * success and 1 for failure.
    *
    * @param args Command line arguments.
-   * @throws Exception
+   * @throws Exception raised if the shell command fails.
    */
   public static void main(String[] args) throws Exception {
     int res = ToolRunner.run(new Configuration(), new KeyShell(), args);

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java

@@ -63,7 +63,7 @@ public class ValueQueue <E> {
      * @param keyName Key name
      * @param keyQueue Queue that needs to be filled
      * @param numValues number of Values to be added to the queue.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public void fillQueueForKey(String keyName,
         Queue<E> keyQueue, int numValues) throws IOException;
@@ -268,7 +268,7 @@ public class ValueQueue <E> {
    * Initializes the Value Queues for the provided keys by calling the
    * fill Method with "numInitValues" values
    * @param keyNames Array of key Names
-   * @throws ExecutionException
+   * @throws ExecutionException execution exception.
    */
   public void initializeQueuesForKeys(String... keyNames)
       throws ExecutionException {
@@ -285,8 +285,8 @@ public class ValueQueue <E> {
    * function to add 1 value to Queue and then drain it.
    * @param keyName String key name
    * @return E the next value in the Queue
-   * @throws IOException
-   * @throws ExecutionException
+   * @throws IOException raised on errors performing I/O.
+   * @throws ExecutionException execution exception.
    */
   public E getNext(String keyName)
       throws IOException, ExecutionException {
@@ -344,8 +344,8 @@ public class ValueQueue <E> {
    * @param keyName String key name
    * @param num Minimum number of values to return.
    * @return {@literal List<E>} values returned
-   * @throws IOException
-   * @throws ExecutionException
+   * @throws IOException raised on errors performing I/O.
+   * @throws ExecutionException execution exception.
    */
   public List<E> getAtMost(String keyName, int num) throws IOException,
       ExecutionException {

+ 252 - 25
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java

@@ -272,7 +272,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param supportedScheme the scheme supported by the implementor
   * @param authorityNeeded if true then the URI must have authority, if false
    *          then the URI must have null authority.
-   *
+   * @param defaultPort default port to use if port is not specified in the URI.
    * @throws URISyntaxException <code>uri</code> has syntax error
    */
   public AbstractFileSystem(final URI uri, final String supportedScheme,
@@ -281,11 +281,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
     myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
     statistics = getStatistics(uri); 
   }
-  
+
   /**
-   * Check that the Uri's scheme matches
-   * @param uri
-   * @param supportedScheme
+   * Check that the Uri's scheme matches.
+   *
+   * @param uri name URI of the FS.
+   * @param supportedScheme supported scheme.
    */
   public void checkScheme(URI uri, String supportedScheme) {
     String scheme = uri.getScheme();
@@ -362,7 +363,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * If the path is fully qualified URI, then its scheme and authority
    * matches that of this file system. Otherwise the path must be 
    * slash-relative name.
-   * 
+   * @param path the path.
    * @throws InvalidPathException if the path is invalid
    */
   public void checkPath(Path path) {
@@ -431,7 +432,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   
   /**
    * Make the path fully qualified to this file system
-   * @param path
+   * @param path the path.
    * @return the qualified path
    */
   public Path makeQualified(Path path) {
@@ -496,9 +497,9 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * through any internal symlinks or mount point
    * @param p path to be resolved
    * @return fully qualified path 
-   * @throws FileNotFoundException
-   * @throws AccessControlException
-   * @throws IOException
+   * @throws FileNotFoundException thrown when the file is not found.
+   * @throws AccessControlException thrown on access control errors.
+   * @throws IOException raised on errors performing I/O.
    * @throws UnresolvedLinkException if symbolic link on path cannot be
    * resolved internally
    */
@@ -513,6 +514,18 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
    * that the Path f must be fully qualified and the permission is absolute
    * (i.e. umask has been applied).
+   *
+   * @param f the path.
+   * @param createFlag create flag.
+   * @param opts create options.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not dir exception.
+   * @throws UnsupportedFileSystemException unsupported file system exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return output stream.
    */
   public final FSDataOutputStream create(final Path f,
       final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
@@ -630,6 +643,24 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
    * have been declared explicitly.
+   *
+   * @param f the path.
+   * @param flag create flag.
+   * @param absolutePermission absolute permission.
+   * @param bufferSize buffer size.
+   * @param replication replications.
+   * @param blockSize block size.
+   * @param progress progress.
+   * @param checksumOpt check sum opt.
+   * @param createParent create parent.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnsupportedFileSystemException unsupported filesystem exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return output stream.
    */
   public abstract FSDataOutputStream createInternal(Path f,
       EnumSet<CreateFlag> flag, FsPermission absolutePermission,
@@ -644,6 +675,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
    * f must be fully qualified and the permission is absolute (i.e. 
    * umask has been applied).
+   * @param dir directory.
+   * @param permission permission.
+   * @param createParent create parent flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void mkdir(final Path dir, final FsPermission permission,
       final boolean createParent) throws AccessControlException,
@@ -654,6 +693,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#delete(Path, boolean)} except that Path f must be for
    * this file system.
+   *
+   * @param f the path.
+   * @param recursive recursive flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the delete succeeded, false otherwise.
    */
   public abstract boolean delete(final Path f, final boolean recursive)
       throws AccessControlException, FileNotFoundException,
@@ -663,6 +710,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#open(Path)} except that Path f must be for this
    * file system.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return input stream.
    */
   public FSDataInputStream open(final Path f) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException {
@@ -673,6 +727,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#open(Path, int)} except that Path f must be for this
    * file system.
+   *
+   * @param f the path.
+   * @param bufferSize buffer size.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return input stream.
    */
   public abstract FSDataInputStream open(final Path f, int bufferSize)
       throws AccessControlException, FileNotFoundException,
@@ -682,6 +744,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#truncate(Path, long)} except that Path f must be for
    * this file system.
+   *
+   * @param f the path.
+   * @param newLength new length.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the truncate succeeded, false otherwise.
    */
   public boolean truncate(Path f, long newLength)
       throws AccessControlException, FileNotFoundException,
@@ -694,6 +764,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setReplication(Path, short)} except that Path f must be
    * for this file system.
+   *
+   * @param f the path.
+   * @param replication replication.
+   * @return true if the replication was set, false otherwise.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract boolean setReplication(final Path f,
       final short replication) throws AccessControlException,
@@ -703,6 +781,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
    * f must be for this file system.
+   *
+   * @param src src.
+   * @param dst dst.
+   * @param options options.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public final void rename(final Path src, final Path dst,
       final Options.Rename... options) throws AccessControlException,
@@ -727,6 +815,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * File systems that do not have a built in overwrite need implement only this
    * method and can take advantage of the default impl of the other
    * {@link #renameInternal(Path, Path, boolean)}
+   *
+   * @param src src.
+   * @param dst dst.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void renameInternal(final Path src, final Path dst)
       throws AccessControlException, FileAlreadyExistsException,
@@ -737,6 +834,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
    * f must be for this file system.
+   *
+   * @param src src.
+   * @param dst dst.
+   * @param overwrite overwrite flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public void renameInternal(final Path src, final Path dst,
       boolean overwrite) throws AccessControlException,
@@ -800,6 +907,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of  
    * {@link FileContext#createSymlink(Path, Path, boolean)};
+   *
+   * @param target target.
+   * @param link link.
+   * @param createParent create parent.
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnresolvedLinkException unresolved link exception.
    */
   public void createSymlink(final Path target, final Path link,
       final boolean createParent) throws IOException, UnresolvedLinkException {
@@ -810,6 +923,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
+   * @param f the path.
+   * @return target path.
    * @throws IOException subclass implementations may throw IOException 
    */
   public Path getLinkTarget(final Path f) throws IOException {
@@ -822,6 +937,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setPermission(Path, FsPermission)} except that Path f
    * must be for this file system.
+   *
+   * @param f the path.
+   * @param permission permission.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setPermission(final Path f,
       final FsPermission permission) throws AccessControlException,
@@ -831,6 +953,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setOwner(Path, String, String)} except that Path f must
    * be for this file system.
+   *
+   * @param f the path.
+   * @param username username.
+   * @param groupname groupname.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setOwner(final Path f, final String username,
       final String groupname) throws AccessControlException,
@@ -840,6 +970,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setTimes(Path, long, long)} except that Path f must be
    * for this file system.
+   *
+   * @param f the path.
+   * @param mtime modify time.
+   * @param atime access time.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setTimes(final Path f, final long mtime,
     final long atime) throws AccessControlException, FileNotFoundException,
@@ -849,6 +987,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFileChecksum(Path)} except that Path f must be for
    * this file system.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return File Check sum.
    */
   public abstract FileChecksum getFileChecksum(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -859,6 +1004,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#getFileStatus(Path)} 
    * except that an UnresolvedLinkException may be thrown if a symlink is 
    * encountered in the path.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return File Status
    */
   public abstract FileStatus getFileStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -870,8 +1022,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * In some FileSystem implementations such as HDFS metadata
    * synchronization is essential to guarantee consistency of read requests
    * particularly in HA setting.
-   * @throws IOException
-   * @throws UnsupportedOperationException
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnsupportedOperationException unsupported operation exception.
    */
   public void msync() throws IOException, UnsupportedOperationException {
     throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -883,6 +1035,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#access(Path, FsAction)}
    * except that an UnresolvedLinkException may be thrown if a symlink is
    * encountered in the path.
+   *
+   * @param path the path.
+   * @param mode fsaction mode.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
   public void access(Path path, FsAction mode) throws AccessControlException,
@@ -897,6 +1056,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * encountered in the path leading up to the final path component.
    * If the file system does not support symlinks then the behavior is
    * equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnsupportedFileSystemException unsupported file system exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return file status.
    */
   public FileStatus getFileLinkStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -908,6 +1074,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFileBlockLocations(Path, long, long)} except that
    * Path f must be for this file system.
+   *
+   * @param f the path.
+   * @param start start.
+   * @param len length.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return BlockLocation Array.
    */
   public abstract BlockLocation[] getFileBlockLocations(final Path f,
       final long start, final long len) throws AccessControlException,
@@ -917,6 +1092,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)} except that Path f must be for this
    * file system.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return Fs Status.
    */
   public FsStatus getFsStatus(final Path f) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException {
@@ -927,6 +1109,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)}.
+   *
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return Fs Status.
    */
   public abstract FsStatus getFsStatus() throws AccessControlException,
       FileNotFoundException, IOException;
@@ -935,6 +1122,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#listStatus(Path)} except that Path f must be for this
    * file system.
+   *
+   * @param f path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return FileStatus Iterator.
    */
   public RemoteIterator<FileStatus> listStatusIterator(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -967,6 +1161,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * will have different formats for replicated and erasure coded file. Please
    * refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)}
    * for more details.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return FileStatus Iterator.
    */
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -999,6 +1200,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext.Util#listStatus(Path)} except that Path f must be 
    * for this file system.
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return FileStatus Iterator.
    */
   public abstract FileStatus[] listStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -1007,7 +1214,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
-   * @throws IOException
+   * @param path the path.
+   * @throws IOException raised on errors performing I/O.
    */
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -1020,6 +1228,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
    * must be for this file system.
+   *
+   * @param verifyChecksum verify check sum flag.
+   * @throws AccessControlException access control exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setVerifyChecksum(final boolean verifyChecksum)
       throws AccessControlException, IOException;
@@ -1041,7 +1253,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param renewer the account name that is allowed to renew the token.
    * @return List of delegation tokens.
    *   If delegation tokens not supported then return a list of size zero.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
@@ -1141,7 +1353,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to modify
    * @param name xattr name.
    * @param value xattr value.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void setXAttr(Path path, String name, byte[] value)
       throws IOException {
@@ -1160,7 +1372,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param name xattr name.
    * @param value xattr value.
    * @param flag xattr set flag
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void setXAttr(Path path, String name, byte[] value,
       EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -1178,7 +1390,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to get extended attribute
    * @param name xattr name.
    * @return byte[] xattr value.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public byte[] getXAttr(Path path, String name) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1196,7 +1408,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Map<String, byte[]> getXAttrs(Path path) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1214,7 +1426,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param names XAttr names.
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Map<String, byte[]> getXAttrs(Path path, List<String> names)
       throws IOException {
@@ -1232,7 +1444,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public List<String> listXAttrs(Path path)
           throws IOException {
@@ -1249,7 +1461,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @param path Path to remove extended attribute
    * @param name xattr name
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void removeXAttr(Path path, String name) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1259,6 +1471,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#createSnapshot(Path, String)}.
+   *
+   * @param path the path.
+   * @param snapshotName snapshot name.
+   * @throws IOException raised on errors performing I/O.
+   * @return path.
    */
   public Path createSnapshot(final Path path, final String snapshotName)
       throws IOException {
@@ -1269,6 +1486,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#renameSnapshot(Path, String, String)}.
+   *
+   * @param path the path.
+   * @param snapshotOldName snapshot old name.
+   * @param snapshotNewName snapshot new name.
+   * @throws IOException raised on errors performing I/O.
    */
   public void renameSnapshot(final Path path, final String snapshotOldName,
       final String snapshotNewName) throws IOException {
@@ -1279,6 +1501,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#deleteSnapshot(Path, String)}.
+   *
+   * @param snapshotDir snapshot dir.
+   * @param snapshotName snapshot name.
+   * @throws IOException raised on errors performing I/O.
    */
   public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
       throws IOException {
@@ -1289,7 +1515,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void satisfyStoragePolicy(final Path path) throws IOException {
     throw new UnsupportedOperationException(
@@ -1303,6 +1529,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param policyName the name of the target storage policy. The list
    *                   of supported Storage policies can be retrieved
    *                   via {@link #getAllStoragePolicies}.
+   * @throws IOException raised on errors performing I/O.
    */
   public void setStoragePolicy(final Path path, final String policyName)
       throws IOException {
@@ -1314,7 +1541,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * Unset the storage policy set for a given file or directory.
    * @param src file or directory path.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void unsetStoragePolicy(final Path src) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1326,7 +1553,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @param src file or directory path.
    * @return storage policy for give file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public BlockStoragePolicySpi getStoragePolicy(final Path src)
       throws IOException {
@@ -1338,7 +1565,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * Retrieve all the storage policies supported by this file system.
    *
    * @return all storage policies supported by this filesystem.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
       throws IOException {

+ 11 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java

@@ -36,13 +36,22 @@ public class AvroFSInput implements Closeable, SeekableInput {
   private final FSDataInputStream stream;
   private final long len;
 
-  /** Construct given an {@link FSDataInputStream} and its length. */
+  /**
+   * Construct given an {@link FSDataInputStream} and its length.
+   *
+   * @param in inputstream.
+   * @param len len.
+   */
   public AvroFSInput(final FSDataInputStream in, final long len) {
     this.stream = in;
     this.len = len;
   }
 
-  /** Construct given a {@link FileContext} and a {@link Path}. */
+  /** Construct given a {@link FileContext} and a {@link Path}.
+   * @param fc filecontext.
+   * @param p the path.
+   * @throws IOException If an I/O error occurred.
+   * */
   public AvroFSInput(final FileContext fc, final Path p) throws IOException {
     FileStatus status = fc.getFileStatus(p);
     this.len = status.getLen();

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java

@@ -68,6 +68,7 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
    * 
    * @param prevKey The key to send.
    * @return A list of replies.
+   * @throws IOException If an I/O error occurred.
    */
   public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;
 
@@ -102,6 +103,8 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
 
   /**
    * Return the next list key associated with an element.
+   * @param element element.
+   * @return K Generics Type.
    */
   public abstract K elementToPrevKey(E element);
 

+ 45 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java

@@ -85,6 +85,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Copy constructor.
+   * @param that blocklocation.
    */
   public BlockLocation(BlockLocation that) {
     this.hosts = that.hosts;
@@ -100,6 +101,10 @@ public class BlockLocation implements Serializable {
 
   /**
    * Constructor with host, name, offset and length.
+   * @param names names array.
+   * @param hosts host array.
+   * @param offset offset.
+   * @param length length.
    */
   public BlockLocation(String[] names, String[] hosts, long offset, 
                        long length) {
@@ -108,6 +113,11 @@ public class BlockLocation implements Serializable {
 
   /**
    * Constructor with host, name, offset, length and corrupt flag.
+   * @param names names.
+   * @param hosts hosts.
+   * @param offset offset.
+   * @param length length.
+   * @param corrupt corrupt.
    */
   public BlockLocation(String[] names, String[] hosts, long offset, 
                        long length, boolean corrupt) {
@@ -116,6 +126,11 @@ public class BlockLocation implements Serializable {
 
   /**
    * Constructor with host, name, network topology, offset and length.
+   * @param names names.
+   * @param hosts hosts.
+   * @param topologyPaths topologyPaths.
+   * @param offset offset.
+   * @param length length.
    */
   public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
                        long offset, long length) {
@@ -125,6 +140,12 @@ public class BlockLocation implements Serializable {
   /**
    * Constructor with host, name, network topology, offset, length 
    * and corrupt flag.
+   * @param names names.
+   * @param hosts hosts.
+   * @param topologyPaths topologyPaths.
+   * @param offset offset.
+   * @param length length.
+   * @param corrupt corrupt.
    */
   public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
                        long offset, long length, boolean corrupt) {
@@ -177,6 +198,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the list of hosts (hostname) hosting this block.
+   * @return hosts array.
+   * @throws IOException If an I/O error occurred.
    */
   public String[] getHosts() throws IOException {
     return hosts;
@@ -184,6 +207,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the list of hosts (hostname) hosting a cached replica of the block.
+   * @return cached hosts.
    */
   public String[] getCachedHosts() {
     return cachedHosts;
@@ -191,6 +215,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the list of names (IP:xferPort) hosting this block.
+   * @return names array.
+   * @throws IOException If an I/O error occurred.
    */
   public String[] getNames() throws IOException {
     return names;
@@ -199,6 +225,8 @@ public class BlockLocation implements Serializable {
   /**
    * Get the list of network topology paths for each of the hosts.
    * The last component of the path is the "name" (IP:xferPort).
+   * @return topology paths.
+   * @throws IOException If an I/O error occurred.
    */
   public String[] getTopologyPaths() throws IOException {
     return topologyPaths;
@@ -206,6 +234,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the storageID of each replica of the block.
+   * @return storage ids.
    */
   public String[] getStorageIds() {
     return storageIds;
@@ -213,6 +242,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the storage type of each replica of the block.
+   * @return storage type of each replica of the block.
    */
   public StorageType[] getStorageTypes() {
     return storageTypes;
@@ -220,6 +250,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the start offset of file associated with this block.
+   * @return start offset of file associated with this block.
    */
   public long getOffset() {
     return offset;
@@ -227,6 +258,7 @@ public class BlockLocation implements Serializable {
   
   /**
    * Get the length of the block.
+   * @return length of the block.
    */
   public long getLength() {
     return length;
@@ -234,6 +266,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the corrupt flag.
+   * @return corrupt flag.
    */
   public boolean isCorrupt() {
     return corrupt;
@@ -241,6 +274,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Return true if the block is striped (erasure coded).
+   * @return if the block is striped true, not false.
    */
   public boolean isStriped() {
     return false;
@@ -248,6 +282,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the start offset of file associated with this block.
+   * @param offset start offset.
    */
   public void setOffset(long offset) {
     this.offset = offset;
@@ -255,6 +290,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the length of block.
+   * @param length length of block.
    */
   public void setLength(long length) {
     this.length = length;
@@ -262,6 +298,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the corrupt flag.
+   * @param corrupt corrupt flag.
    */
   public void setCorrupt(boolean corrupt) {
     this.corrupt = corrupt;
@@ -269,6 +306,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the hosts hosting this block.
+   * @param hosts hosts array.
+   * @throws IOException If an I/O error occurred.
    */
   public void setHosts(String[] hosts) throws IOException {
     if (hosts == null) {
@@ -280,6 +319,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the hosts hosting a cached replica of this block.
+   * @param cachedHosts cached hosts.
    */
   public void setCachedHosts(String[] cachedHosts) {
     if (cachedHosts == null) {
@@ -291,6 +331,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the names (host:port) hosting this block.
+   * @param names names.
+   * @throws IOException If an I/O error occurred.
    */
   public void setNames(String[] names) throws IOException {
     if (names == null) {
@@ -302,6 +344,9 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the network topology paths of the hosts.
+   *
+   * @param topologyPaths topology paths.
+   * @throws IOException If an I/O error occurred.
    */
   public void setTopologyPaths(String[] topologyPaths) throws IOException {
     if (topologyPaths == null) {

+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java

@@ -47,6 +47,12 @@ public final class ByteBufferUtil {
 
   /**
    * Perform a fallback read.
+   *
+   * @param stream input stream.
+   * @param bufferPool bufferPool.
+   * @param maxLength maxLength.
+   * @throws IOException raised on errors performing I/O.
+   * @return byte buffer.
    */
   public static ByteBuffer fallbackRead(
       InputStream stream, ByteBufferPool bufferPool, int maxLength)

+ 9 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java

@@ -53,6 +53,9 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
   /**
    * This is the constructor used by the builder.
    * All overriding classes should implement this.
+   *
+   * @param builder builder.
+   * @throws IOException raised on errors performing I/O.
    */
   public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder)
       throws IOException {
@@ -140,6 +143,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
 
   /**
    * Increment the cached value of used space.
+   *
+   * @param value dfs used value.
    */
   public void incDfsUsed(long value) {
     used.addAndGet(value);
@@ -154,6 +159,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
 
   /**
    * How long in between runs of the background refresh.
+   *
+   * @return refresh interval.
    */
   @VisibleForTesting
   public long getRefreshInterval() {
@@ -163,6 +170,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
   /**
    * Randomize the refresh interval timing by this amount, the actual interval will be chosen
    * uniformly between {@code interval-jitter} and {@code interval+jitter}.
+   *
+   * @return between interval-jitter and interval+jitter.
    */
   @VisibleForTesting
   public long getJitter() {

+ 32 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java

@@ -102,25 +102,44 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     return fs;
   }
 
-  /** Return the name of the checksum file associated with a file.*/
+  /**
+   * Return the name of the checksum file associated with a file.
+   *
+   * @param file the file path.
+   * @return name of the checksum file associated with a file.
+   */
   public Path getChecksumFile(Path file) {
     return new Path(file.getParent(), "." + file.getName() + ".crc");
   }
 
-  /** Return true iff file is a checksum file name.*/
+  /**
+   * Return true if file is a checksum file name.
+   *
+   * @param file the file path.
+   * @return if file is a checksum file true, not false.
+   */
   public static boolean isChecksumFile(Path file) {
     String name = file.getName();
     return name.startsWith(".") && name.endsWith(".crc");
   }
 
-  /** Return the length of the checksum file given the size of the 
+  /**
+   * Return the length of the checksum file given the size of the
    * actual file.
-   **/
+   *
+   * @param file the file path.
+   * @param fileSize file size.
+   * @return checksum length.
+   */
   public long getChecksumFileLength(Path file, long fileSize) {
     return getChecksumLength(fileSize, getBytesPerSum());
   }
 
-  /** Return the bytes Per Checksum */
+  /**
+   * Return the bytes Per Checksum.
+   *
+   * @return bytes per checksum.
+   */
   public int getBytesPerSum() {
     return bytesPerChecksum;
   }
@@ -362,6 +381,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Opens an FSDataInputStream at the indicated Path.
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
+   * @throws IOException if an I/O error occurs.
    */
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
@@ -669,7 +689,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
@@ -754,7 +774,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * @param f
    *          given path
    * @return the statuses of the files/directories in the given path
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    */
   @Override
   public FileStatus[] listStatus(Path f) throws IOException {
@@ -775,7 +795,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * @param f
    *          given path
    * @return the statuses of the files/directories in the given patch
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    */
   @Override
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
@@ -811,6 +831,10 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Copy it from FS control to the local dst name.
    * If src and dst are directories, the copyCrc parameter
    * determines whether to copy CRC files.
+   * @param src src path.
+   * @param dst dst path.
+   * @param copyCrc copy crc flag.
+   * @throws IOException if an I/O error occurs.
    */
   @SuppressWarnings("deprecation")
   public void copyToLocalFile(Path src, Path dst, boolean copyCrc)

+ 30 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java

@@ -70,30 +70,53 @@ public abstract class ChecksumFs extends FilterFs {
     this.verifyChecksum = inVerifyChecksum;
   }
 
-  /** get the raw file system. */
+  /**
+   * get the raw file system.
+   *
+   * @return abstract file system.
+   */
   public AbstractFileSystem getRawFs() {
     return getMyFs();
   }
 
-  /** Return the name of the checksum file associated with a file.*/
+  /**
+   * Return the name of the checksum file associated with a file.
+   *
+   * @param file the file path.
+   * @return the checksum file associated with a file.
+   */
   public Path getChecksumFile(Path file) {
     return new Path(file.getParent(), "." + file.getName() + ".crc");
   }
 
-  /** Return true iff file is a checksum file name.*/
+  /**
+   * Return true iff file is a checksum file name.
+   *
+   * @param file the file path.
+   * @return if file is a checksum file true, not false.
+   */
   public static boolean isChecksumFile(Path file) {
     String name = file.getName();
     return name.startsWith(".") && name.endsWith(".crc");
   }
 
-  /** Return the length of the checksum file given the size of the 
+  /**
+   * Return the length of the checksum file given the size of the
    * actual file.
-   **/
+   *
+   * @param file the file path.
+   * @param fileSize file size.
+   * @return check sum file length.
+   */
   public long getChecksumFileLength(Path file, long fileSize) {
     return getChecksumLength(fileSize, getBytesPerSum());
   }
 
-  /** Return the bytes Per Checksum. */
+  /**
+   * Return the bytes Per Checksum.
+   *
+   * @return bytes per sum.
+   */
   public int getBytesPerSum() {
     return defaultBytesPerChecksum;
   }
@@ -433,7 +456,7 @@ public abstract class ChecksumFs extends FilterFs {
    * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    * @return true if successful;
    *         false if file does not exist or is a directory
    */

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

@@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic {
 
   /**
    * Number of filesystems instances can be created in parallel.
-   * <p></p>
+   * <p>
    * A higher number here does not necessarily improve performance, especially
    * for object stores, where multiple threads may be attempting to create an FS
    * instance for the same URI.
-   * <p></p>
+   * </p>
    * Default value: {@value}.
    */
   public static final String FS_CREATION_PARALLEL_COUNT =
@@ -181,8 +181,9 @@ public class CommonConfigurationKeysPublic {
 
   /**
    * Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
-   * <p></p>
+   * <p>
    * Default value: {@value}.
+   * </p>
    */
   public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
       64;

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java

@@ -37,7 +37,13 @@ public class CompositeCrcFileChecksum extends FileChecksum {
   private DataChecksum.Type crcType;
   private int bytesPerCrc;
 
-  /** Create a CompositeCrcFileChecksum. */
+  /**
+   * Create a CompositeCrcFileChecksum.
+   *
+   * @param crc crc.
+   * @param crcType crcType.
+   * @param bytesPerCrc bytesPerCrc.
+   */
   public CompositeCrcFileChecksum(
       int crc, DataChecksum.Type crcType, int bytesPerCrc) {
     this.crc = crc;

+ 22 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java

@@ -149,17 +149,31 @@ public class ContentSummary extends QuotaUsage implements Writable{
   @Deprecated
   public ContentSummary() {}
   
-  /** Constructor, deprecated by ContentSummary.Builder
+  /**
+   *  Constructor, deprecated by ContentSummary.Builder
    *  This constructor implicitly set spaceConsumed the same as length.
    *  spaceConsumed and length must be set explicitly with
-   *  ContentSummary.Builder
+   *  ContentSummary.Builder.
+   *
+   * @param length length.
+   * @param fileCount file count.
+   * @param directoryCount directory count.
    * */
   @Deprecated
   public ContentSummary(long length, long fileCount, long directoryCount) {
     this(length, fileCount, directoryCount, -1L, length, -1L);
   }
 
-  /** Constructor, deprecated by ContentSummary.Builder */
+  /**
+   * Constructor, deprecated by ContentSummary.Builder.
+   *
+   * @param length length.
+   * @param fileCount file count.
+   * @param directoryCount directory count.
+   * @param quota quota.
+   * @param spaceConsumed space consumed.
+   * @param spaceQuota space quota.
+   * */
   @Deprecated
   public ContentSummary(
       long length, long fileCount, long directoryCount, long quota,
@@ -172,7 +186,11 @@ public class ContentSummary extends QuotaUsage implements Writable{
     setSpaceQuota(spaceQuota);
   }
 
-  /** Constructor for ContentSummary.Builder*/
+  /**
+   * Constructor for ContentSummary.Builder.
+   *
+   * @param builder builder.
+   */
   private ContentSummary(Builder builder) {
     super(builder);
     this.length = builder.length;

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java

@@ -189,6 +189,8 @@ public enum CreateFlag {
   /**
    * Validate the CreateFlag for the append operation. The flag must contain
    * APPEND, and cannot contain OVERWRITE.
+   *
+   * @param flag enum set flag.
    */
   public static void validateForAppend(EnumSet<CreateFlag> flag) {
     validate(flag);

+ 8 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java

@@ -65,7 +65,10 @@ public class DF extends Shell {
     return dirPath;
   }
 
-  /** @return a string indicating which filesystem volume we're checking. */
+  /**
+   * @return a string indicating which filesystem volume we're checking.
+   * @throws IOException raised on errors performing I/O.
+   */
   public String getFilesystem() throws IOException {
     if (Shell.WINDOWS) {
       this.filesystem = dirFile.getCanonicalPath().substring(0, 2);
@@ -100,7 +103,10 @@ public class DF extends Shell {
     return (int) (used * 100.0 / cap);
   }
 
-  /** @return the filesystem mount point for the indicated volume */
+  /**
+   * @return the filesystem mount point for the indicated volume.
+   * @throws IOException raised on errors performing I/O.
+   */
   public String getMount() throws IOException {
     // Abort early if specified path does not exist
     if (!dirFile.exists()) {

+ 21 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java

@@ -47,7 +47,11 @@ public class DelegationTokenRenewer
     /** @return the renew token. */
     public Token<?> getRenewToken();
 
-    /** Set delegation token. */
+    /**
+     * Set delegation token.
+     * @param <T> generic type T.
+     * @param token token.
+     */
     public <T extends TokenIdentifier> void setDelegationToken(Token<T> token);
   }
 
@@ -172,7 +176,11 @@ public class DelegationTokenRenewer
   /** Queue to maintain the RenewActions to be processed by the {@link #run()} */
   private volatile DelayQueue<RenewAction<?>> queue = new DelayQueue<RenewAction<?>>();
   
-  /** For testing purposes */
+  /**
+   * For testing purposes.
+   *
+   * @return renew queue length.
+   */
   @VisibleForTesting
   protected int getRenewQueueLength() {
     return queue.size();
@@ -211,7 +219,13 @@ public class DelegationTokenRenewer
     }
   }
   
-  /** Add a renew action to the queue. */
+  /**
+   * Add a renew action to the queue.
+   *
+   * @param <T> generic type T.
+   * @param fs file system.
+   * @return renew action.
+   * */
   @SuppressWarnings("static-access")
   public <T extends FileSystem & Renewable> RenewAction<T> addRenewAction(final T fs) {
     synchronized (this) {
@@ -230,8 +244,10 @@ public class DelegationTokenRenewer
 
   /**
    * Remove the associated renew action from the queue
-   * 
-   * @throws IOException
+   *
+   * @param <T> generic type T.
+   * @param fs file system.
+   * @throws IOException raised on errors performing I/O.
    */
   public <T extends FileSystem & Renewable> void removeRenewAction(
       final T fs) throws IOException {

+ 44 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java

@@ -37,12 +37,17 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
 
   /**
    * Set optional Builder parameter.
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    */
   B opt(@Nonnull String key, @Nonnull String value);
 
   /**
    * Set optional boolean parameter for the Builder.
-   *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, boolean value);
@@ -50,6 +55,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional int parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, int value);
@@ -57,6 +65,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional float parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, float value);
@@ -64,6 +75,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional long parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, long value);
@@ -71,6 +85,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional double parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, double value);
@@ -78,6 +95,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set an array of string values as optional parameter for the Builder.
    *
+   * @param key key.
+   * @param values values.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, @Nonnull String... values);
@@ -87,12 +107,19 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
    *
    * If the option is not supported or unavailable,
    * the client should expect {@link #build()} throws IllegalArgumentException.
+   *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    */
   B must(@Nonnull String key, @Nonnull String value);
 
   /**
    * Set mandatory boolean option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, boolean value);
@@ -100,6 +127,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory int option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, int value);
@@ -107,6 +137,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory float option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, float value);
@@ -114,6 +147,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory long option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, long value);
@@ -121,6 +157,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory double option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, double value);
@@ -128,6 +167,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set a string array as mandatory option.
    *
+   * @param key key.
+   * @param values values.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, @Nonnull String... values);
@@ -139,6 +181,7 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
    * @throws UnsupportedOperationException if the filesystem does not support
    * the specific operation.
    * @throws IOException on filesystem IO errors.
+   * @return generic type S.
    */
   S build() throws IllegalArgumentException,
       UnsupportedOperationException, IOException;

+ 32 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java

@@ -123,6 +123,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Constructor.
+   *
+   * @param fileSystem file system.
+   * @param p the path.
    */
   protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem,
       @Nonnull Path p) {
@@ -149,6 +152,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set permission for the file.
+   *
+   * @param perm permission.
+   * @return B Generics Type.
    */
   public B permission(@Nonnull final FsPermission perm) {
     checkNotNull(perm);
@@ -162,6 +168,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set the size of the buffer to be used.
+   *
+   * @param bufSize buffer size.
+   * @return Generics Type B.
    */
   public B bufferSize(int bufSize) {
     bufferSize = bufSize;
@@ -174,6 +183,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set replication factor.
+   *
+   * @param replica replica.
+   * @return Generics Type B.
    */
   public B replication(short replica) {
     replication = replica;
@@ -186,6 +198,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set block size.
+   *
+   * @param blkSize block size.
+   * @return B Generics Type.
    */
   public B blockSize(long blkSize) {
     blockSize = blkSize;
@@ -194,6 +209,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Return true to create the parent directories if they do not exist.
+   *
+   * @return true if the parent directories will be created when they do not exist, false otherwise.
    */
   protected boolean isRecursive() {
     return recursive;
@@ -201,6 +218,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Create the parent directory if they do not exist.
+   *
+   * @return B Generics Type.
    */
   public B recursive() {
     recursive = true;
@@ -213,6 +232,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set the facility of reporting progress.
+   *
+   * @param prog progress.
+   * @return B Generics Type.
    */
   public B progress(@Nonnull final Progressable prog) {
     checkNotNull(prog);
@@ -226,6 +248,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Create an FSDataOutputStream at the specified path.
+   *
+   * @return return Generics Type B.
    */
   public B create() {
     flags.add(CreateFlag.CREATE);
@@ -236,6 +260,9 @@ public abstract class FSDataOutputStreamBuilder
    * Set to true to overwrite the existing file.
    * Set it to false, an exception will be thrown when calling {@link #build()}
    * if the file exists.
+   *
+   * @param overwrite whether to overwrite the existing file.
+   * @return Generics Type B.
    */
   public B overwrite(boolean overwrite) {
     if (overwrite) {
@@ -248,6 +275,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Append to an existing file (optional operation).
+   *
+   * @return Generics Type B.
    */
   public B append() {
     flags.add(CreateFlag.APPEND);
@@ -260,6 +289,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set checksum opt.
+   *
+   * @param chksumOpt check sum opt.
+   * @return Generics Type B.
    */
   public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
     checkNotNull(chksumOpt);

+ 9 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java

@@ -82,6 +82,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @param sum the type of Checksum engine
    * @param chunkSize maximun chunk size
    * @param checksumSize the number byte of each checksum
+   * @param verifyChecksum verify check sum.
    */
   protected FSInputChecker( Path file, int numOfRetries, 
       boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) {
@@ -118,6 +119,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @param len maximum number of bytes to read
    * @param checksum the data buffer into which to write checksums
    * @return number of bytes read
+   * @throws IOException raised on errors performing I/O.
    */
   abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
       byte[] checksum) throws IOException;
@@ -129,7 +131,10 @@ abstract public class FSInputChecker extends FSInputStream {
    */
   abstract protected long getChunkPosition(long pos);
 
-  /** Return true if there is a need for checksum verification */
+  /**
+   * Return true if there is a need for checksum verification.
+   * @return true if there is a need for checksum verification, false otherwise.
+   */
   protected synchronized boolean needChecksum() {
     return verifyChecksum && sum != null;
   }
@@ -357,6 +362,9 @@ abstract public class FSInputChecker extends FSInputStream {
    * Convert a checksum byte array to a long
    * This is deprecated since 0.22 since it is no longer in use
    * by this class.
+   *
+   * @param checksum check sum.
+   * @return crc.
    */
   @Deprecated
   static public long checksum2long(byte[] checksum) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java

@@ -74,7 +74,7 @@ public abstract class FSLinkResolver<T> {
    * @param fc FileContext used to access file systems.
    * @param path The path to resolve symlinks on.
    * @return Generic type determined by the implementation of next.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public T resolve(final FileContext fc, final Path path) throws IOException {
     int count = 0;

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java

@@ -186,6 +186,8 @@ abstract public class FSOutputSummer extends OutputStream implements
 
   /**
    * Return the number of valid bytes currently in the buffer.
+   *
+   * @return buffer data size.
    */
   protected synchronized int getBufferedDataSize() {
     return count;
@@ -227,6 +229,10 @@ abstract public class FSOutputSummer extends OutputStream implements
 
   /**
    * Converts a checksum integer value to a byte stream
+   *
+   * @param sum check sum.
+   * @param checksumSize check sum size.
+   * @return byte stream.
    */
   static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
     return int2byte((int)sum.getValue(), new byte[checksumSize]);
@@ -245,6 +251,8 @@ abstract public class FSOutputSummer extends OutputStream implements
 
   /**
    * Resets existing buffer with a new one of the specified size.
+   *
+   * @param size size.
    */
   protected synchronized void setChecksumBufSize(int size) {
     this.buf = new byte[size];

+ 21 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java

@@ -28,20 +28,37 @@ import org.apache.hadoop.io.Writable;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class FileChecksum implements Writable {
-  /** The checksum algorithm name */
+  /**
+   * The checksum algorithm name.
+   *
+   * @return algorithm name.
+   */
   public abstract String getAlgorithmName();
 
-  /** The length of the checksum in bytes */
+  /**
+   * The length of the checksum in bytes.
+   *
+   * @return length.
+   */
   public abstract int getLength();
 
-  /** The value of the checksum in bytes */
+  /**
+   * The value of the checksum in bytes.
+   *
+   * @return byte array.
+   */
   public abstract byte[] getBytes();
 
   public ChecksumOpt getChecksumOpt() {
     return null;
   }
 
-  /** Return true if both the algorithms and the values are the same. */
+  /**
+   * Return true if both the algorithms and the values are the same.
+   *
+   * @param other other.
+   * @return true if equal, false otherwise.
+   */
   @Override
   public boolean equals(Object other) {
     if (other == this) {

+ 73 - 39
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java

@@ -366,8 +366,8 @@ public class FileContext implements PathCapabilities {
    * Create a FileContext with specified FS as default using the specified
    * config.
    * 
-   * @param defFS
-   * @param aConf
+   * @param defFS default fs.
+   * @param aConf configuration.
    * @return new FileContext with specified FS as default.
    */
   public static FileContext getFileContext(final AbstractFileSystem defFS,
@@ -378,7 +378,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Create a FileContext for specified file system using the default config.
    * 
-   * @param defaultFS
+   * @param defaultFS default fs.
    * @return a FileContext with the specified AbstractFileSystem
    *                 as the default FS.
    */
@@ -411,6 +411,7 @@ public class FileContext implements PathCapabilities {
    * 
    * @throws UnsupportedFileSystemException If the file system from the default
    *           configuration is not supported
+   * @return file context.
    */
   public static FileContext getFileContext()
       throws UnsupportedFileSystemException {
@@ -430,7 +431,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Create a FileContext for specified URI using the default config.
    * 
-   * @param defaultFsUri
+   * @param defaultFsUri defaultFsUri.
    * @return a FileContext with the specified URI as the default FS.
    * 
    * @throws UnsupportedFileSystemException If the file system for
@@ -444,8 +445,8 @@ public class FileContext implements PathCapabilities {
   /**
    * Create a FileContext for specified default URI using the specified config.
    * 
-   * @param defaultFsUri
-   * @param aConf
+   * @param defaultFsUri defaultFsUri.
+   * @param aConf configuration.
    * @return new FileContext for specified uri
    * @throws UnsupportedFileSystemException If the file system with specified is
    *           not supported
@@ -476,7 +477,7 @@ public class FileContext implements PathCapabilities {
    * {@link #getFileContext(URI, Configuration)} instead of this one.
    * 
    * 
-   * @param aConf
+   * @param aConf configuration.
    * @return new FileContext
    * @throws UnsupportedFileSystemException If file system in the config
    *           is not supported
@@ -554,6 +555,7 @@ public class FileContext implements PathCapabilities {
   
   /**
    * Gets the working directory for wd-relative names (such a "foo/bar").
+   * @return the path.
    */
   public Path getWorkingDirectory() {
     return workingDir;
@@ -600,13 +602,14 @@ public class FileContext implements PathCapabilities {
    * @throws FileNotFoundException  If <code>f</code> does not exist
    * @throws AccessControlException if access denied
    * @throws IOException If an IO Error occurred
-   * 
+   * @throws UnresolvedLinkException If unresolved link occurred.
+   *
    * Exceptions applicable to file systems accessed over RPC:
    * @throws RpcClientException If an exception occurred in the RPC client
    * @throws RpcServerException If an exception occurred in the RPC server
    * @throws UnexpectedServerException If server implementation throws
    *           undeclared exception to RPC server
-   * 
+   *
    * RuntimeExceptions:
    * @throws InvalidPathException If path <code>f</code> is not valid
    */
@@ -620,7 +623,7 @@ public class FileContext implements PathCapabilities {
    * A Fully-qualified path has scheme and authority specified and an absolute
    * path.
    * Use the default file system and working dir in this FileContext to qualify.
-   * @param path
+   * @param path the path.
    * @return qualified path
    */
   public Path makeQualified(final Path path) {
@@ -759,6 +762,7 @@ public class FileContext implements PathCapabilities {
    *
    * Client should expect {@link FSDataOutputStreamBuilder#build()} throw the
    * same exceptions as create(Path, EnumSet, CreateOpts...).
+   * @throws IOException If an I/O error occurred.
    */
   public FSDataOutputStreamBuilder<FSDataOutputStream, ?> create(final Path f)
       throws IOException {
@@ -832,6 +836,8 @@ public class FileContext implements PathCapabilities {
    * 
    * RuntimeExceptions:
    * @throws InvalidPathException If path <code>f</code> is invalid
+   *
+   * @return true if the delete is successful, false otherwise.
    */
   public boolean delete(final Path f, final boolean recursive)
       throws AccessControlException, FileNotFoundException,
@@ -862,6 +868,7 @@ public class FileContext implements PathCapabilities {
    * @throws RpcServerException If an exception occurred in the RPC server
    * @throws UnexpectedServerException If server implementation throws 
    *           undeclared exception to RPC server
+   * @return input stream.
    */
   public FSDataInputStream open(final Path f) throws AccessControlException,
       FileNotFoundException, UnsupportedFileSystemException, IOException {
@@ -892,6 +899,7 @@ public class FileContext implements PathCapabilities {
    * @throws RpcServerException If an exception occurred in the RPC server
    * @throws UnexpectedServerException If server implementation throws 
    *           undeclared exception to RPC server
+   * @return input stream.
    */
   public FSDataInputStream open(final Path f, final int bufferSize)
       throws AccessControlException, FileNotFoundException,
@@ -1001,6 +1009,7 @@ public class FileContext implements PathCapabilities {
    * 
    * @param src path to be renamed
    * @param dst new path after rename
+   * @param options rename options.
    * 
    * @throws AccessControlException If access is denied
    * @throws FileAlreadyExistsException If <code>dst</code> already exists and
@@ -1052,7 +1061,7 @@ public class FileContext implements PathCapabilities {
   
   /**
    * Set permission of a path.
-   * @param f
+   * @param f the path.
    * @param permission - the new absolute permission (umask is not applied)
    *
    * @throws AccessControlException If access is denied
@@ -1196,7 +1205,7 @@ public class FileContext implements PathCapabilities {
    * Set the verify checksum flag for the  file system denoted by the path.
    * This is only applicable if the 
    * corresponding FileSystem supports checksum. By default doesn't do anything.
-   * @param verifyChecksum
+   * @param verifyChecksum verify check sum.
    * @param f set the verifyChecksum for the Filesystem containing this path
    *
    * @throws AccessControlException If access is denied
@@ -1251,8 +1260,9 @@ public class FileContext implements PathCapabilities {
   /**
    * Synchronize client metadata state.
    *
-   * @throws IOException
-   * @throws UnsupportedOperationException
+   * @throws IOException If an I/O error occurred.
+   * @throws UnsupportedOperationException If file system for <code>f</code> is
+   *                                       not supported.
    */
   public void msync() throws IOException, UnsupportedOperationException {
     defaultFS.msync();
@@ -1613,9 +1623,12 @@ public class FileContext implements PathCapabilities {
   }
 
   /**
+   * List CorruptFile Blocks.
+   *
+   * @param path the path.
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -1739,6 +1752,7 @@ public class FileContext implements PathCapabilities {
      * @throws RpcServerException If an exception occurred in the RPC server
      * @throws UnexpectedServerException If server implementation throws 
      *           undeclared exception to RPC server
+     * @return true if <code>f</code> exists, false otherwise.
      */
     public boolean exists(final Path f) throws AccessControlException,
       UnsupportedFileSystemException, IOException {
@@ -1799,6 +1813,12 @@ public class FileContext implements PathCapabilities {
     
     /**
      * See {@link #listStatus(Path[], PathFilter)}
+     *
+     * @param files files.
+     * @throws AccessControlException If access is denied.
+     * @throws FileNotFoundException If <code>files</code> does not exist.
+     * @throws IOException If an I/O error occurred.
+     * @return file status array.
      */
     public FileStatus[] listStatus(Path[] files) throws AccessControlException,
         FileNotFoundException, IOException {
@@ -2054,36 +2074,29 @@ public class FileContext implements PathCapabilities {
      *    <dt> <tt> ? </tt>
      *    <dd> Matches any single character.
      *
-     *    <p>
      *    <dt> <tt> * </tt>
      *    <dd> Matches zero or more characters.
      *
-     *    <p>
      *    <dt> <tt> [<i>abc</i>] </tt>
      *    <dd> Matches a single character from character set
      *     <tt>{<i>a,b,c</i>}</tt>.
      *
-     *    <p>
      *    <dt> <tt> [<i>a</i>-<i>b</i>] </tt>
      *    <dd> Matches a single character from the character range
      *     <tt>{<i>a...b</i>}</tt>. Note: character <tt><i>a</i></tt> must be
      *     lexicographically less than or equal to character <tt><i>b</i></tt>.
      *
-     *    <p>
      *    <dt> <tt> [^<i>a</i>] </tt>
      *    <dd> Matches a single char that is not from character set or range
      *     <tt>{<i>a</i>}</tt>.  Note that the <tt>^</tt> character must occur
      *     immediately to the right of the opening bracket.
      *
-     *    <p>
      *    <dt> <tt> \<i>c</i> </tt>
      *    <dd> Removes (escapes) any special meaning of character <i>c</i>.
      *
-     *    <p>
      *    <dt> <tt> {ab,cd} </tt>
      *    <dd> Matches a string from the string set <tt>{<i>ab, cd</i>} </tt>
-     *    
-     *    <p>
+     *
      *    <dt> <tt> {ab,c{de,fh}} </tt>
      *    <dd> Matches a string from string set <tt>{<i>ab, cde, cfh</i>}</tt>
      *
@@ -2144,6 +2157,18 @@ public class FileContext implements PathCapabilities {
     /**
      * Copy file from src to dest. See
      * {@link #copy(Path, Path, boolean, boolean)}
+     *
+     * @param src src.
+     * @param dst dst.
+     * @throws AccessControlException If access is denied.
+     * @throws FileAlreadyExistsException If file <code>dst</code> already exists.
+     * @throws FileNotFoundException If <code>src</code> does not exist.
+     * @throws ParentNotDirectoryException If parent of <code>dst</code> is not a
+     * directory.
+     * @throws UnsupportedFileSystemException If file system for
+     * <code>src/dst</code> is not supported.
+     * @throws IOException If an I/O error occurred.
+     * @return true if the copy is successful, false otherwise.
      */
     public boolean copy(final Path src, final Path dst)
         throws AccessControlException, FileAlreadyExistsException,
@@ -2154,8 +2179,8 @@ public class FileContext implements PathCapabilities {
     
     /**
      * Copy from src to dst, optionally deleting src and overwriting dst.
-     * @param src
-     * @param dst
+     * @param src src.
+     * @param dst dst.
      * @param deleteSource - delete src if true
      * @param overwrite  overwrite dst if true; throw IOException if dst exists
      *         and overwrite is false.
@@ -2276,7 +2301,7 @@ public class FileContext implements PathCapabilities {
    * Are qualSrc and qualDst of the same file system?
    * @param qualPath1 - fully qualified path
    * @param qualPath2 - fully qualified path
-   * @return
+   * @return true if both paths are on the same file system, false otherwise.
    */
   private static boolean isSameFS(Path qualPath1, Path qualPath2) {
     URI srcUri = qualPath1.toUri();
@@ -2299,6 +2324,13 @@ public class FileContext implements PathCapabilities {
   /**
    * Resolves all symbolic links in the specified path.
    * Returns the new path object.
+   *
+   * @param f the path.
+   * @throws FileNotFoundException If <code>f</code> does not exist.
+   * @throws UnresolvedLinkException If unresolved link occurred.
+   * @throws AccessControlException If access is denied.
+   * @throws IOException If an I/O error occurred.
+   * @return resolve path.
    */
   protected Path resolve(final Path f) throws FileNotFoundException,
       UnresolvedLinkException, AccessControlException, IOException {
@@ -2316,6 +2348,7 @@ public class FileContext implements PathCapabilities {
    * to, but not including the final path component.
    * @param f path to resolve
    * @return the new path object.
+   * @throws IOException If an I/O error occurred.
    */
   protected Path resolveIntermediate(final Path f) throws IOException {
     return new FSLinkResolver<FileStatus>() {
@@ -2334,7 +2367,7 @@ public class FileContext implements PathCapabilities {
    * @param f
    *          Path which needs to be resolved
    * @return List of AbstractFileSystems accessed in the path
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
       throws IOException {
@@ -2395,7 +2428,7 @@ public class FileContext implements PathCapabilities {
    * @param p Path for which delegations tokens are requested.
    * @param renewer the account name that is allowed to renew the token.
    * @return List of delegation tokens.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public List<Token<?>> getDelegationTokens(
@@ -2547,7 +2580,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to modify
    * @param name xattr name.
    * @param value xattr value.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void setXAttr(Path path, String name, byte[] value)
       throws IOException {
@@ -2566,7 +2599,7 @@ public class FileContext implements PathCapabilities {
    * @param name xattr name.
    * @param value xattr value.
    * @param flag xattr set flag
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void setXAttr(Path path, final String name, final byte[] value,
       final EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -2591,7 +2624,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to get extended attribute
    * @param name xattr name.
    * @return byte[] xattr value.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public byte[] getXAttr(Path path, final String name) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2614,7 +2647,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
    * of the file or directory
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public Map<String, byte[]> getXAttrs(Path path) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2638,7 +2671,7 @@ public class FileContext implements PathCapabilities {
    * @param names XAttr names.
    * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
    * of the file or directory
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public Map<String, byte[]> getXAttrs(Path path, final List<String> names)
       throws IOException {
@@ -2661,7 +2694,7 @@ public class FileContext implements PathCapabilities {
    *
    * @param path Path to remove extended attribute
    * @param name xattr name
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void removeXAttr(Path path, final String name) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2685,7 +2718,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return List{@literal <}String{@literal >} of the XAttr names of the
    * file or directory
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public List<String> listXAttrs(Path path) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2802,7 +2835,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void satisfyStoragePolicy(final Path path)
       throws IOException {
@@ -2824,6 +2857,7 @@ public class FileContext implements PathCapabilities {
    * @param policyName the name of the target storage policy. The list
    *                   of supported Storage policies can be retrieved
    *                   via {@link #getAllStoragePolicies}.
+   * @throws IOException If an I/O error occurred.
    */
   public void setStoragePolicy(final Path path, final String policyName)
       throws IOException {
@@ -2841,7 +2875,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Unset the storage policy set for a given file or directory.
    * @param src file or directory path.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void unsetStoragePolicy(final Path src) throws IOException {
     final Path absF = fixRelativePart(src);
@@ -2860,7 +2894,7 @@ public class FileContext implements PathCapabilities {
    *
    * @param path file or directory path.
    * @return storage policy for give file.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2878,7 +2912,7 @@ public class FileContext implements PathCapabilities {
    * Retrieve all the storage policies supported by this file system.
    *
    * @return all storage policies supported by this filesystem.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
       throws IOException {

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java

@@ -52,6 +52,7 @@ public class FileEncryptionInfo implements Serializable {
    * @param keyName name of the key used for the encryption zone
    * @param ezKeyVersionName name of the KeyVersion used to encrypt the
    *                         encrypted data encryption key.
+   * @param version version.
    */
   public FileEncryptionInfo(final CipherSuite suite,
       final CryptoProtocolVersion version, final byte[] edek,
@@ -134,6 +135,8 @@ public class FileEncryptionInfo implements Serializable {
    *
    * NOTE:
    * Currently this method is used by CLI for backward compatibility.
+   *
+   * @return stable string.
    */
   public String toStringStable() {
     StringBuilder builder = new StringBuilder("{")

+ 14 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java

@@ -116,6 +116,17 @@ public class FileStatus implements Writable, Comparable<Object>,
 
   /**
    * Constructor for file systems on which symbolic links are not supported
+   *
+   * @param length length.
+   * @param isdir isdir.
+   * @param block_replication block replication.
+   * @param blocksize block size.
+   * @param modification_time modification time.
+   * @param access_time access_time.
+   * @param permission permission.
+   * @param owner owner.
+   * @param group group.
+   * @param path the path.
    */
   public FileStatus(long length, boolean isdir,
                     int block_replication,
@@ -182,6 +193,7 @@ public class FileStatus implements Writable, Comparable<Object>,
    * Copy constructor.
    *
    * @param other FileStatus to copy
+   * @throws IOException raised on errors performing I/O.
    */
   public FileStatus(FileStatus other) throws IOException {
     // It's important to call the getters here instead of directly accessing the
@@ -375,6 +387,8 @@ public class FileStatus implements Writable, Comparable<Object>,
 
   /**
    * @return The contents of the symbolic link.
+   *
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getSymlink() throws IOException {
     if (!isSymlink()) {

+ 139 - 31
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

@@ -104,13 +104,13 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
  * All user code that may potentially use the Hadoop Distributed
  * File System should be written to use a FileSystem object or its
  * successor, {@link FileContext}.
- *
+ * </p>
  * <p>
  * The local implementation is {@link LocalFileSystem} and distributed
  * implementation is DistributedFileSystem. There are other implementations
  * for object stores and (outside the Apache Hadoop codebase),
  * third party filesystems.
- * <p>
+ * </p>
  * Notes
  * <ol>
  * <li>The behaviour of the filesystem is
@@ -133,13 +133,12 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
  * New methods may be marked as Unstable or Evolving for their initial release,
  * as a warning that they are new and may change based on the
  * experience of use in applications.
- * <p></p>
+ * <p>
  * <b>Important note for developers</b>
- * <p></p>
+ * </p>
  * If you are making changes here to the public API or protected methods,
  * you must review the following subclasses and make sure that
  * they are filtering/passing through new methods as appropriate.
- * <p></p>
  *
  * {@link FilterFileSystem}: methods are passed through. If not,
  * then {@code TestFilterFileSystem.MustNotImplement} must be
@@ -148,21 +147,22 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
  * {@link #hasPathCapability(Path, String)} then
  * {@link FilterFileSystem#hasPathCapability(Path, String)}
  * must return false, always.
- * <p></p>
+ * <p>
  * {@link ChecksumFileSystem}: checksums are created and
  * verified.
- * <p></p>
+ * </p>
  * {@code TestHarFileSystem} will need its {@code MustNotImplement}
  * interface updated.
- * <p></p>
  *
+ * <p>
  * There are some external places your changes will break things.
  * Do co-ordinate changes here.
- * <p></p>
+ * </p>
  *
  * HBase: HBoss
- * <p></p>
+ * <p>
  * Hive: HiveShim23
+ * </p>
  * {@code shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java}
  *
  *****************************************************************/
@@ -281,6 +281,8 @@ public abstract class FileSystem extends Configured
   /**
    * Returns the configured FileSystem implementation.
    * @param conf the configuration to use
+   * @return FileSystem.
+   * @throws IOException If an I/O error occurred.
    */
   public static FileSystem get(Configuration conf) throws IOException {
     return get(getDefaultUri(conf), conf);
@@ -375,6 +377,7 @@ public abstract class FileSystem extends Configured
    * implement that method.
    *
    * @see #canonicalizeUri(URI)
+   * @return the URI of this filesystem.
    */
   protected URI getCanonicalUri() {
     return canonicalizeUri(getUri());
@@ -391,6 +394,7 @@ public abstract class FileSystem extends Configured
    * not specified and if {@link #getDefaultPort()} returns a
    * default port.
    *
+   * @param uri the URI to canonicalize.
    * @return URI
    * @see NetUtils#getCanonicalUri(URI, int)
    */
@@ -454,11 +458,21 @@ public abstract class FileSystem extends Configured
       : null;
   }
 
-  /** @deprecated call {@link #getUri()} instead.*/
+  /**
+   * @return the URI of this filesystem as a string.
+   * @deprecated call {@link #getUri()} instead.
+   */
   @Deprecated
   public String getName() { return getUri().toString(); }
 
-  /** @deprecated call {@link #get(URI, Configuration)} instead. */
+  /**
+   * @deprecated call {@link #get(URI, Configuration)} instead.
+   *
+   * @param name name.
+   * @param conf configuration.
+   * @return file system.
+   * @throws IOException If an I/O error occurred.
+   */
   @Deprecated
   public static FileSystem getNamed(String name, Configuration conf)
     throws IOException {
@@ -513,6 +527,9 @@ public abstract class FileSystem extends Configured
    *   configuration and URI, cached and returned to the caller.
    * </li>
    * </ol>
+   * @param uri uri of the filesystem.
+   * @param conf configuration.
+   * @return filesystem instance.
    * @throws IOException if the FileSystem cannot be instantiated.
    */
   public static FileSystem get(URI uri, Configuration conf) throws IOException {
@@ -542,7 +559,7 @@ public abstract class FileSystem extends Configured
   /**
    * Returns the FileSystem for this URI's scheme and authority and the
    * given user. Internally invokes {@link #newInstance(URI, Configuration)}
-   * @param uri of the filesystem
+   * @param uri uri of the filesystem.
    * @param conf the configuration to use
    * @param user to perform the get as
    * @return filesystem instance
@@ -860,6 +877,7 @@ public abstract class FileSystem extends Configured
    * @param start offset into the given file
    * @param len length for which to get locations for
    * @throws IOException IO failure
+   * @return block location array.
    */
   public BlockLocation[] getFileBlockLocations(FileStatus file,
       long start, long len) throws IOException {
@@ -900,6 +918,7 @@ public abstract class FileSystem extends Configured
    * @param len length for which to get locations for
    * @throws FileNotFoundException when the path does not exist
    * @throws IOException IO failure
+   * @return block location array.
    */
   public BlockLocation[] getFileBlockLocations(Path p,
       long start, long len) throws IOException {
@@ -962,6 +981,7 @@ public abstract class FileSystem extends Configured
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
    * @throws IOException IO failure
+   * @return input stream.
    */
   public abstract FSDataInputStream open(Path f, int bufferSize)
     throws IOException;
@@ -970,6 +990,7 @@ public abstract class FileSystem extends Configured
    * Opens an FSDataInputStream at the indicated Path.
    * @param f the file to open
    * @throws IOException IO failure
+   * @return input stream.
    */
   public FSDataInputStream open(Path f) throws IOException {
     return open(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -987,6 +1008,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException If {@link #open(PathHandle, int)}
    *                                       not overridden by subclass
+   * @return input stream.
    */
   public FSDataInputStream open(PathHandle fd) throws IOException {
     return open(fd, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1004,6 +1026,7 @@ public abstract class FileSystem extends Configured
    *                                    not satisfied
    * @throws IOException IO failure
    * @throws UnsupportedOperationException If not overridden by subclass
+   * @return input stream.
    */
   public FSDataInputStream open(PathHandle fd, int bufferSize)
       throws IOException {
@@ -1021,6 +1044,7 @@ public abstract class FileSystem extends Configured
    *         not overridden by subclass.
    * @throws UnsupportedOperationException If this FileSystem cannot enforce
    *         the specified constraints.
+   * @return path handle.
    */
   public final PathHandle getPathHandle(FileStatus stat, HandleOpt... opt) {
     // method is final with a default so clients calling getPathHandle(stat)
@@ -1036,6 +1060,7 @@ public abstract class FileSystem extends Configured
    * @param stat Referent in the target FileSystem
    * @param opt Constraints that determine the validity of the
    *            {@link PathHandle} reference.
+   * @return path handle.
    */
   protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) {
     throw new UnsupportedOperationException();
@@ -1046,6 +1071,7 @@ public abstract class FileSystem extends Configured
    * Files are overwritten by default.
    * @param f the file to create
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f) throws IOException {
     return create(f, true);
@@ -1057,6 +1083,7 @@ public abstract class FileSystem extends Configured
    * @param overwrite if a file with this name already exists, then if true,
    *   the file will be overwritten, and if false an exception will be thrown.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, boolean overwrite)
       throws IOException {
@@ -1074,6 +1101,7 @@ public abstract class FileSystem extends Configured
    * @param f the file to create
    * @param progress to report progress
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, Progressable progress)
       throws IOException {
@@ -1090,6 +1118,7 @@ public abstract class FileSystem extends Configured
    * @param f the file to create
    * @param replication the replication factor
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, short replication)
       throws IOException {
@@ -1108,6 +1137,7 @@ public abstract class FileSystem extends Configured
    * @param replication the replication factor
    * @param progress to report progress
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, short replication,
       Progressable progress) throws IOException {
@@ -1125,6 +1155,7 @@ public abstract class FileSystem extends Configured
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
                                    boolean overwrite,
@@ -1144,7 +1175,9 @@ public abstract class FileSystem extends Configured
    * @param overwrite if a file with this name already exists, then if true,
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
+   * @param progress to report progress.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
                                    boolean overwrite,
@@ -1164,7 +1197,9 @@ public abstract class FileSystem extends Configured
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
    * @param replication required block replication for the file.
+   * @param blockSize the size of the block to be used.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
       boolean overwrite,
@@ -1182,7 +1217,10 @@ public abstract class FileSystem extends Configured
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
    * @param replication required block replication for the file.
+   * @param blockSize the size of the block to be used.
+   * @param progress to report progress.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
                                             boolean overwrite,
@@ -1209,6 +1247,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public abstract FSDataOutputStream create(Path f,
       FsPermission permission,
@@ -1230,6 +1269,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
       FsPermission permission,
@@ -1256,6 +1296,7 @@ public abstract class FileSystem extends Configured
    *        found in conf will be used.
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
       FsPermission permission,
@@ -1277,6 +1318,16 @@ public abstract class FileSystem extends Configured
    * the permission with umask before calling this method.
    * This a temporary method added to support the transition from FileSystem
    * to FileContext for user applications.
+   *
+   * @param f path.
+   * @param absolutePermission permission.
+   * @param flag create flag.
+   * @param bufferSize buffer size.
+   * @param replication replication.
+   * @param blockSize block size.
+   * @param progress progress.
+   * @param checksumOpt check sum opt.
+   * @return output stream.
    * @throws IOException IO failure
    */
   @Deprecated
@@ -1331,6 +1382,11 @@ public abstract class FileSystem extends Configured
    * with umask before calling this method.
    * This a temporary method added to support the transition from FileSystem
    * to FileContext for user applications.
+   *
+   * @param f the path.
+   * @param absolutePermission permission.
+   * @param createParent create parent.
+   * @throws IOException IO failure.
    */
   @Deprecated
   protected void primitiveMkdir(Path f, FsPermission absolutePermission,
@@ -1370,6 +1426,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public FSDataOutputStream createNonRecursive(Path f,
       boolean overwrite,
@@ -1393,6 +1450,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
        boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -1416,6 +1474,7 @@ public abstract class FileSystem extends Configured
     * @param progress the progress reporter
     * @throws IOException IO failure
     * @see #setPermission(Path, FsPermission)
+    * @return output stream.
     */
     public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
         EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
@@ -1430,6 +1489,7 @@ public abstract class FileSystem extends Configured
    * <i>Important: the default implementation is not atomic</i>
    * @param f path to use for create
    * @throws IOException IO failure
+   * @return true if the file was successfully created, false otherwise.
    */
   public boolean createNewFile(Path f) throws IOException {
     if (exists(f)) {
@@ -1450,6 +1510,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default).
+   * @return output stream.
    */
   public FSDataOutputStream append(Path f) throws IOException {
     return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1464,6 +1525,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default).
+   * @return output stream.
    */
   public FSDataOutputStream append(Path f, int bufferSize) throws IOException {
     return append(f, bufferSize, null);
@@ -1477,6 +1539,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default).
+   * @return output stream.
    */
   public abstract FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException;
@@ -1515,7 +1578,7 @@ public abstract class FileSystem extends Configured
    * This is the default behavior.
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException an IO failure.
    * @return true if successful, or the feature in unsupported;
    *         false if replication is supported but the file does not exist,
    *         or is a directory
@@ -1544,11 +1607,12 @@ public abstract class FileSystem extends Configured
    * <p>
    * If OVERWRITE option is not passed as an argument, rename fails
    * if the dst already exists.
+   * </p>
    * <p>
    * If OVERWRITE option is passed as an argument, rename overwrites
    * the dst if it is a file or an empty directory. Rename fails if dst is
    * a non-empty directory.
-   * <p>
+   * </p>
    * Note that atomicity of rename is dependent on the file system
    * implementation. Please refer to the file system documentation for
    * details. This default implementation is non atomic.
@@ -1556,9 +1620,11 @@ public abstract class FileSystem extends Configured
    * This method is deprecated since it is a temporary method added to
    * support the transition from FileSystem to FileContext for user
    * applications.
+   * </p>
    *
    * @param src path to be renamed
    * @param dst new path after rename
+   * @param options rename options.
    * @throws FileNotFoundException src path does not exist, or the parent
    * path of dst does not exist.
    * @throws FileAlreadyExistsException dest path exists and is a file
@@ -1653,6 +1719,9 @@ public abstract class FileSystem extends Configured
 
   /**
    * Delete a file/directory.
+   * @param f the path.
+   * @throws IOException IO failure.
+   * @return true if the delete succeeded, false otherwise.
    * @deprecated Use {@link #delete(Path, boolean)} instead.
    */
   @Deprecated
@@ -1769,6 +1838,7 @@ public abstract class FileSystem extends Configured
    * @param f path to check
    * @throws IOException IO failure
    * @deprecated Use {@link #getFileStatus(Path)} instead
+   * @return true if f is a directory, false otherwise.
    */
   @Deprecated
   public boolean isDirectory(Path f) throws IOException {
@@ -1786,6 +1856,7 @@ public abstract class FileSystem extends Configured
    * @param f path to check
    * @throws IOException IO failure
    * @deprecated Use {@link #getFileStatus(Path)} instead
+   * @return true if f is a file, false otherwise.
    */
   @Deprecated
   public boolean isFile(Path f) throws IOException {
@@ -1798,6 +1869,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * The number of bytes in a file.
+   * @param f the path.
    * @return the number of bytes; 0 for a directory
    * @deprecated Use {@link #getFileStatus(Path)} instead.
    * @throws FileNotFoundException if the path does not resolve
@@ -1812,6 +1884,7 @@ public abstract class FileSystem extends Configured
    * @param f path to use
    * @throws FileNotFoundException if the path does not resolve
    * @throws IOException IO failure
+   * @return content summary.
    */
   public ContentSummary getContentSummary(Path f) throws IOException {
     FileStatus status = getFileStatus(f);
@@ -1946,9 +2019,9 @@ public abstract class FileSystem extends Configured
    * @param f Path to list
    * @param token opaque iteration token returned by previous call, or null
    *              if this is the first call.
-   * @return
-   * @throws FileNotFoundException
-   * @throws IOException
+   * @return directory entries.
+   * @throws FileNotFoundException when the path does not exist.
+   * @throws IOException If an I/O error occurred.
    */
   @InterfaceAudience.Private
   protected DirectoryEntries listStatusBatch(Path f, byte[] token) throws
@@ -1979,6 +2052,8 @@ public abstract class FileSystem extends Configured
 
   /**
    * List corrupted file blocks.
+   *
+   * @param path the path.
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
    * @throws UnsupportedOperationException if the operation is unsupported
@@ -2072,36 +2147,29 @@ public abstract class FileSystem extends Configured
    *    <dt> <tt> ? </tt>
    *    <dd> Matches any single character.
    *
-   *    <p>
    *    <dt> <tt> * </tt>
    *    <dd> Matches zero or more characters.
    *
-   *    <p>
    *    <dt> <tt> [<i>abc</i>] </tt>
    *    <dd> Matches a single character from character set
    *     <tt>{<i>a,b,c</i>}</tt>.
    *
-   *    <p>
    *    <dt> <tt> [<i>a</i>-<i>b</i>] </tt>
    *    <dd> Matches a single character from the character range
    *     <tt>{<i>a...b</i>}</tt>.  Note that character <tt><i>a</i></tt> must be
    *     lexicographically less than or equal to character <tt><i>b</i></tt>.
    *
-   *    <p>
    *    <dt> <tt> [^<i>a</i>] </tt>
    *    <dd> Matches a single character that is not from character set or range
    *     <tt>{<i>a</i>}</tt>.  Note that the <tt>^</tt> character must occur
    *     immediately to the right of the opening bracket.
    *
-   *    <p>
    *    <dt> <tt> \<i>c</i> </tt>
    *    <dd> Removes (escapes) any special meaning of character <i>c</i>.
    *
-   *    <p>
    *    <dt> <tt> {ab,cd} </tt>
    *    <dd> Matches a string from the string set <tt>{<i>ab, cd</i>} </tt>
    *
-   *    <p>
    *    <dt> <tt> {ab,c{de,fh}} </tt>
    *    <dd> Matches a string from the string set <tt>{<i>ab, cde, cfh</i>}</tt>
    *
@@ -2332,6 +2400,7 @@ public abstract class FileSystem extends Configured
 
   /** Return the current user's home directory in this FileSystem.
    * The default implementation returns {@code "/user/$USER/"}.
+   * @return the path.
    */
   public Path getHomeDirectory() {
     String username;
@@ -2394,6 +2463,7 @@ public abstract class FileSystem extends Configured
    * @param f path to create
    * @param permission to apply to f
    * @throws IOException IO failure
+   * @return true if the directory was created, false otherwise.
    */
   public abstract boolean mkdirs(Path f, FsPermission permission
       ) throws IOException;
@@ -2441,6 +2511,7 @@ public abstract class FileSystem extends Configured
    * @param delSrc whether to delete the src
    * @param src path
    * @param dst path
+   * @throws IOException IO failure.
    */
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
@@ -2555,6 +2626,7 @@ public abstract class FileSystem extends Configured
    * @param fsOutputFile path of output file
    * @param tmpLocalFile path of local tmp file
    * @throws IOException IO failure
+   * @return the path.
    */
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
@@ -2602,6 +2674,7 @@ public abstract class FileSystem extends Configured
   /**
    * Return the total size of all files in the filesystem.
    * @throws IOException IO failure
+   * @return the total size in bytes of all files in the filesystem.
    */
   public long getUsed() throws IOException {
     Path path = new Path("/");
@@ -2610,7 +2683,9 @@ public abstract class FileSystem extends Configured
 
   /**
    * Return the total size of all files from a specified path.
+   * @param path the path.
    * @throws IOException IO failure
+   * @return the total size in bytes of all files under the given path.
    */
   public long getUsed(Path path) throws IOException {
     return getContentSummary(path).getLength();
@@ -2633,6 +2708,7 @@ public abstract class FileSystem extends Configured
    * Return the number of bytes that large input files should be optimally
    * be split into to minimize I/O time.
    * @deprecated use {@link #getDefaultBlockSize(Path)} instead
+   * @return default block size.
    */
   @Deprecated
   public long getDefaultBlockSize() {
@@ -2685,8 +2761,8 @@ public abstract class FileSystem extends Configured
    * In some FileSystem implementations such as HDFS metadata
    * synchronization is essential to guarantee consistency of read requests
    * particularly in HA setting.
-   * @throws IOException
-   * @throws UnsupportedOperationException
+   * @throws IOException If an I/O error occurred.
+   * @throws UnsupportedOperationException if the operation is unsupported.
    */
   public void msync() throws IOException, UnsupportedOperationException {
     throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -2762,6 +2838,8 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#fixRelativePart}.
+   * @param p the path.
+   * @return relative part.
    */
   protected Path fixRelativePart(Path p) {
     if (p.isUriPathAbsolute()) {
@@ -2773,6 +2851,18 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#createSymlink(Path, Path, boolean)}.
+   *
+   * @param target target path.
+   * @param link link.
+   * @param createParent create parent.
+   * @throws AccessControlException if access is denied.
+   * @throws FileAlreadyExistsException when the link path already exists.
+   * @throws FileNotFoundException when the path does not exist.
+   * @throws ParentNotDirectoryException if the parent path of dest is not
+   *                                     a directory.
+   * @throws UnsupportedFileSystemException if there was no known implementation
+   *                                        for the scheme.
+   * @throws IOException raised on errors performing I/O.
    */
   public void createSymlink(final Path target, final Path link,
       final boolean createParent) throws AccessControlException,
@@ -2786,8 +2876,14 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#getFileLinkStatus(Path)}.
-   * @throws FileNotFoundException when the path does not exist
-   * @throws IOException see specific implementation
+   *
+   * @param f the path.
+   * @throws AccessControlException if access is denied.
+   * @throws FileNotFoundException when the path does not exist.
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnsupportedFileSystemException if there was no known implementation
+   *                                        for the scheme.
+   * @return file status
    */
   public FileStatus getFileLinkStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -2798,6 +2894,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link AbstractFileSystem#supportsSymlinks()}.
+   * @return true if symlinks are supported, false otherwise.
    */
   public boolean supportsSymlinks() {
     return false;
@@ -2805,8 +2902,11 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#getLinkTarget(Path)}.
+   * @param f the path.
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default outcome).
+   * @throws IOException IO failure.
+   * @return the path.
    */
   public Path getLinkTarget(Path f) throws IOException {
     // Supporting filesystems should override this method
@@ -2816,8 +2916,11 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link AbstractFileSystem#getLinkTarget(Path)}.
+   * @param f the path.
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default outcome).
+   * @throws IOException IO failure.
+   * @return the path.
    */
   protected Path resolveLink(Path f) throws IOException {
     // Supporting filesystems should override this method
@@ -3221,7 +3324,7 @@ public abstract class FileSystem extends Configured
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void satisfyStoragePolicy(final Path path) throws IOException {
     throw new UnsupportedOperationException(
@@ -3529,7 +3632,7 @@ public abstract class FileSystem extends Configured
      * @param conf configuration
      * @param key key to store/retrieve this FileSystem in the cache
      * @return a cached or newly instantiated FileSystem.
-     * @throws IOException
+     * @throws IOException If an I/O error occurred.
      */
     private FileSystem getInternal(URI uri, Configuration conf, Key key)
         throws IOException{
@@ -4024,6 +4127,7 @@ public abstract class FileSystem extends Configured
 
     /**
      * Get or create the thread-local data associated with the current thread.
+     * @return statistics data.
      */
     public StatisticsData getThreadStatistics() {
       StatisticsData data = threadData.get();
@@ -4382,6 +4486,7 @@ public abstract class FileSystem extends Configured
   /**
    * Return the FileSystem classes that have Statistics.
    * @deprecated use {@link #getGlobalStorageStatistics()}
+   * @return statistics lists.
    */
   @Deprecated
   public static synchronized List<Statistics> getAllStatistics() {
@@ -4390,6 +4495,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * Get the statistics for a particular file system.
+   * @param scheme scheme.
    * @param cls the class to lookup
    * @return a statistics object
    * @deprecated use {@link #getGlobalStorageStatistics()}
@@ -4424,6 +4530,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * Print all statistics for all file systems to {@code System.out}
+   * @throws IOException If an I/O error occurred.
    */
   public static synchronized
   void printStatistics() throws IOException {
@@ -4464,6 +4571,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * Get the global storage statistics.
+   * @return global storage statistics.
    */
   public static GlobalStorageStatistics getGlobalStorageStatistics() {
     return GlobalStorageStatistics.INSTANCE;

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java

@@ -38,8 +38,8 @@ public abstract class FileSystemLinkResolver<T> {
    * an UnresolvedLinkException if called on an unresolved {@link Path}.
    * @param p Path on which to perform an operation
    * @return Generic type returned by operation
-   * @throws IOException
-   * @throws UnresolvedLinkException
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnresolvedLinkException unresolved link exception.
    */
   abstract public T doCall(final Path p) throws IOException,
       UnresolvedLinkException;
@@ -54,7 +54,7 @@ public abstract class FileSystemLinkResolver<T> {
    * @param p
    *          Resolved Target of path
    * @return Generic type determined by implementation
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   abstract public T next(final FileSystem fs, final Path p) throws IOException;
 
@@ -66,7 +66,7 @@ public abstract class FileSystemLinkResolver<T> {
    * @param filesys FileSystem with which to try call
    * @param path Path with which to try call
    * @return Generic type determined by implementation
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public T resolve(final FileSystem filesys, final Path path)
       throws IOException {

+ 70 - 17
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -162,6 +162,8 @@ public class FileUtil {
    * (3) If dir is a normal file, it is deleted.
    * (4) If dir is a normal directory, then dir and all its contents recursively
    *     are deleted.
+   * @param dir dir.
+   * @return fully delete status.
    */
   public static boolean fullyDelete(final File dir) {
     return fullyDelete(dir, false);
@@ -257,6 +259,9 @@ public class FileUtil {
    * we return false, the directory may be partially-deleted.
    * If dir is a symlink to a directory, all the contents of the actual
    * directory pointed to by dir will be deleted.
+   *
+   * @param dir dir.
+   * @return fully delete contents status.
    */
   public static boolean fullyDeleteContents(final File dir) {
     return fullyDeleteContents(dir, false);
@@ -267,8 +272,11 @@ public class FileUtil {
    * we return false, the directory may be partially-deleted.
    * If dir is a symlink to a directory, all the contents of the actual
    * directory pointed to by dir will be deleted.
+   *
+   * @param dir dir.
    * @param tryGrantPermissions if 'true', try grant +rwx permissions to this
    * and all the underlying directories before trying to delete their contents.
+   * @return fully delete contents status.
    */
   public static boolean fullyDeleteContents(final File dir, final boolean tryGrantPermissions) {
     if (tryGrantPermissions) {
@@ -311,7 +319,7 @@ public class FileUtil {
    *
    * @param fs {@link FileSystem} on which the path is present
    * @param dir directory to recursively delete
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link FileSystem#delete(Path, boolean)}
    */
   @Deprecated
@@ -343,7 +351,17 @@ public class FileUtil {
     }
   }
 
-  /** Copy files between FileSystems. */
+  /**
+   * Copy files between FileSystems.
+   * @param srcFS src fs.
+   * @param src src.
+   * @param dstFS dst fs.
+   * @param dst dst.
+   * @param deleteSource delete source.
+   * @param conf configuration.
+   * @return true if the copy succeeded; false otherwise.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static boolean copy(FileSystem srcFS, Path src,
                              FileSystem dstFS, Path dst,
                              boolean deleteSource,
@@ -391,7 +409,19 @@ public class FileUtil {
     return returnVal;
   }
 
-  /** Copy files between FileSystems. */
+  /**
+   * Copy files between FileSystems.
+   *
+   * @param srcFS srcFs.
+   * @param src src.
+   * @param dstFS dstFs.
+   * @param dst dst.
+   * @param deleteSource delete source.
+   * @param overwrite overwrite.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the operation succeeded.
+   */
   public static boolean copy(FileSystem srcFS, Path src,
                              FileSystem dstFS, Path dst,
                              boolean deleteSource,
@@ -403,20 +433,21 @@ public class FileUtil {
 
   /**
    * Copy a file/directory tree within/between filesystems.
-   * <p></p>
+   * <p>
    * returns true if the operation succeeded. When deleteSource is true,
    * this means "after the copy, delete(source) returned true"
    * If the destination is a directory, and mkdirs (dest) fails,
    * the operation will return false rather than raise any exception.
-   * <p></p>
+   * </p>
    * The overwrite flag is about overwriting files; it has no effect about
    * handing an attempt to copy a file atop a directory (expect an IOException),
    * or a directory over a path which contains a file (mkdir will fail, so
    * "false").
-   * <p></p>
+   * <p>
    * The operation is recursive, and the deleteSource operation takes place
    * as each subdirectory is copied. Therefore, if an operation fails partway
    * through, the source tree may be partially deleted.
+   * </p>
    * @param srcFS source filesystem
    * @param srcStatus status of source
    * @param dstFS destination filesystem
@@ -471,7 +502,17 @@ public class FileUtil {
 
   }
 
-  /** Copy local files to a FileSystem. */
+  /**
+   * Copy local files to a FileSystem.
+   *
+   * @param src src.
+   * @param dstFS dstFs.
+   * @param dst dst.
+   * @param deleteSource delete source.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the operation succeeded.
+   */
   public static boolean copy(File src,
                              FileSystem dstFS, Path dst,
                              boolean deleteSource,
@@ -514,7 +555,17 @@ public class FileUtil {
     }
   }
 
-  /** Copy FileSystem files to local files. */
+  /**
+   * Copy FileSystem files to local files.
+   *
+   * @param srcFS srcFs.
+   * @param src src.
+   * @param dst dst.
+   * @param deleteSource delete source.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the operation succeeded.
+   */
   public static boolean copy(FileSystem srcFS, Path src,
                              File dst, boolean deleteSource,
                              Configuration conf) throws IOException {
@@ -958,7 +1009,7 @@ public class FileUtil {
    *
    * @param inFile The tar file as input.
    * @param untarDir The untar directory where to untar the tar file.
-   * @throws IOException
+   * @throws IOException an exception occurred.
    */
   public static void unTar(File inFile, File untarDir) throws IOException {
     if (!untarDir.mkdirs()) {
@@ -1169,6 +1220,7 @@ public class FileUtil {
    * @param target the target for symlink
    * @param linkname the symlink
    * @return 0 on success
+   * @throws IOException raised on errors performing I/O.
    */
   public static int symLink(String target, String linkname) throws IOException{
 
@@ -1230,8 +1282,8 @@ public class FileUtil {
    * @param filename the name of the file to change
    * @param perm the permission string
    * @return the exit code from the command
-   * @throws IOException
-   * @throws InterruptedException
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException command interrupted.
    */
   public static int chmod(String filename, String perm
                           ) throws IOException, InterruptedException {
@@ -1245,7 +1297,7 @@ public class FileUtil {
    * @param perm permission string
    * @param recursive true, if permissions should be changed recursively
    * @return the exit code from the command.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static int chmod(String filename, String perm, boolean recursive)
                             throws IOException {
@@ -1271,7 +1323,7 @@ public class FileUtil {
    * @param file the file to change
    * @param username the new user owner name
    * @param groupname the new group owner name
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void setOwner(File file, String username,
       String groupname) throws IOException {
@@ -1288,7 +1340,7 @@ public class FileUtil {
    * Platform independent implementation for {@link File#setReadable(boolean)}
    * File#setReadable does not work as expected on Windows.
    * @param f input file
-   * @param readable
+   * @param readable readable.
    * @return true on success, false otherwise
    */
   public static boolean setReadable(File f, boolean readable) {
@@ -1309,7 +1361,7 @@ public class FileUtil {
    * Platform independent implementation for {@link File#setWritable(boolean)}
    * File#setWritable does not work as expected on Windows.
    * @param f input file
-   * @param writable
+   * @param writable writable.
    * @return true on success, false otherwise
    */
   public static boolean setWritable(File f, boolean writable) {
@@ -1333,7 +1385,7 @@ public class FileUtil {
    * behavior on Windows as on Unix platforms. Creating, deleting or renaming
    * a file within that folder will still succeed on Windows.
    * @param f input file
-   * @param executable
+   * @param executable executable.
    * @return true on success, false otherwise
    */
   public static boolean setExecutable(File f, boolean executable) {
@@ -1412,7 +1464,7 @@ public class FileUtil {
    * of forking if group == other.
    * @param f the file to change
    * @param permission the new permissions
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void setPermission(File f, FsPermission permission
                                    ) throws IOException {
@@ -1717,6 +1769,7 @@ public class FileUtil {
    * wildcard path to return all jars from the directory to use in a classpath.
    *
    * @param path the path to the directory. The path may include the wildcard.
+   * @param useLocal use local.
    * @return the list of jars as URLs, or an empty list if there are no jars, or
    * the directory does not exist
    */

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java

@@ -233,7 +233,7 @@ public class FilterFileSystem extends FileSystem {
    * 
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
@@ -304,7 +304,7 @@ public class FilterFileSystem extends FileSystem {
    * Set the current working directory for the given file system. All relative
    * paths will be resolved relative to it.
    * 
-   * @param newDir
+   * @param newDir new dir.
    */
   @Override
   public void setWorkingDirectory(Path newDir) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java

@@ -130,7 +130,7 @@ public class FsShell extends Configured implements Tool {
    * Returns the current trash location for the path specified
    * @param path to be deleted
    * @return path to the trash
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getCurrentTrashDir(Path path) throws IOException {
     return getTrash().getCurrentTrashDir(path);

+ 19 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java

@@ -35,24 +35,39 @@ public class FsStatus implements Writable {
   private long used;
   private long remaining;
 
-  /** Construct a FsStatus object, using the specified statistics */
+  /**
+   * Construct a FsStatus object, using the specified statistics.
+   *
+   * @param capacity capacity.
+   * @param used used.
+   * @param remaining remaining.
+   */
   public FsStatus(long capacity, long used, long remaining) {
     this.capacity = capacity;
     this.used = used;
     this.remaining = remaining;
   }
 
-  /** Return the capacity in bytes of the file system */
+  /**
+   * Return the capacity in bytes of the file system.
+   * @return capacity.
+   */
   public long getCapacity() {
     return capacity;
   }
 
-  /** Return the number of bytes used on the file system */
+  /**
+   * Return the number of bytes used on the file system.
+   * @return used.
+   */
   public long getUsed() {
     return used;
   }
 
-  /** Return the number of remaining bytes on the file system */
+  /**
+   * Return the number of remaining bytes on the file system.
+   * @return remaining.
+   */
   public long getRemaining() {
     return remaining;
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java

@@ -56,9 +56,9 @@ public class GlobExpander {
    * {a,b}/{c/\d}        - {a,b}/c/d
    * </pre>
    * 
-   * @param filePattern
+   * @param filePattern file pattern.
    * @return expanded file patterns
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static List<String> expand(String filePattern) throws IOException {
     List<String> fullyExpanded = new ArrayList<String>();

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java

@@ -104,6 +104,8 @@ public enum GlobalStorageStatistics {
   /**
    * Get an iterator that we can use to iterate throw all the global storage
    * statistics objects.
+   *
+   * @return StorageStatistics Iterator.
    */
   synchronized public Iterator<StorageStatistics> iterator() {
     Entry<String, StorageStatistics> first = map.firstEntry();

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

@@ -463,7 +463,7 @@ public class HarFileSystem extends FileSystem {
    * @param start the start of the desired range in the contained file
    * @param len the length of the desired range
    * @return block locations for this segment of file
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
@@ -525,7 +525,7 @@ public class HarFileSystem extends FileSystem {
    * Combine the status stored in the index and the underlying status. 
    * @param h status stored in the index
    * @return the combined file status
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   private FileStatus toFileStatus(HarStatus h) throws IOException {
     final Path p = h.isDir ? archivePath : new Path(archivePath, h.partName);
@@ -635,7 +635,7 @@ public class HarFileSystem extends FileSystem {
    * while creating a hadoop archive.
    * @param f the path in har filesystem
    * @return filestatus.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
@@ -1104,7 +1104,7 @@ public class HarFileSystem extends FileSystem {
      * @param start the start position in the part file
      * @param length the length of valid data in the part file
      * @param bufsize the buffer size
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public HarFSDataInputStream(FileSystem fs, Path  p, long start, 
         long length, int bufsize) throws IOException {

+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java

@@ -156,6 +156,7 @@ public class HardLink {
    * Creates a hardlink.
    * @param file - existing source file
    * @param linkName - desired target link file
+   * @throws IOException raised on errors performing I/O.
    */
   public static void createHardLink(File file, File linkName)
       throws IOException {
@@ -177,6 +178,7 @@ public class HardLink {
    * @param fileBaseNames - list of path-less file names, as returned by 
    *                        parentDir.list()
    * @param linkDir - where the hardlinks should be put. It must already exist.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
       File linkDir) throws IOException {
@@ -204,6 +206,10 @@ public class HardLink {
 
    /**
    * Retrieves the number of links to the specified file.
+    *
+    * @param fileName file name.
+    * @throws IOException raised on errors performing I/O.
+    * @return link count.
    */
   public static int getLinkCount(File fileName) throws IOException {
     if (fileName == null) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java

@@ -33,7 +33,7 @@ public interface HasFileDescriptor {
 
   /**
    * @return the FileDescriptor
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public FileDescriptor getFileDescriptor() throws IOException;
 

+ 17 - 14
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java

@@ -78,8 +78,9 @@ public class LocalDirAllocator {
 
   private final DiskValidator diskValidator;
 
-  /**Create an allocator object
-   * @param contextCfgItemName
+  /**
+   * Create an allocator object.
+   * @param contextCfgItemName contextCfgItemName.
    */
   public LocalDirAllocator(String contextCfgItemName) {
     this.contextCfgItemName = contextCfgItemName;
@@ -123,7 +124,7 @@ public class LocalDirAllocator {
    *  available disk)
    *  @param conf the Configuration object
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathForWrite(String pathStr, 
       Configuration conf) throws IOException {
@@ -139,7 +140,7 @@ public class LocalDirAllocator {
    *  @param size the size of the file that is going to be written
    *  @param conf the Configuration object
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathForWrite(String pathStr, long size, 
       Configuration conf) throws IOException {
@@ -156,7 +157,7 @@ public class LocalDirAllocator {
    *  @param conf the Configuration object
    *  @param checkWrite ensure that the path is writable
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathForWrite(String pathStr, long size, 
                                    Configuration conf,
@@ -171,7 +172,7 @@ public class LocalDirAllocator {
    *  @param pathStr the requested file (this will be searched)
    *  @param conf the Configuration object
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathToRead(String pathStr, 
       Configuration conf) throws IOException {
@@ -184,7 +185,7 @@ public class LocalDirAllocator {
    * @param pathStr the path underneath the roots
    * @param conf the configuration to look up the roots in
    * @return all of the paths that exist under any of the roots
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Iterable<Path> getAllLocalPathsToRead(String pathStr, 
                                                Configuration conf
@@ -205,7 +206,7 @@ public class LocalDirAllocator {
    *  @param size the size of the file that is going to be written
    *  @param conf the Configuration object
    *  @return a unique temporary file
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public File createTmpFileForWrite(String pathStr, long size, 
       Configuration conf) throws IOException {
@@ -213,8 +214,9 @@ public class LocalDirAllocator {
     return context.createTmpFileForWrite(pathStr, size, conf);
   }
   
-  /** Method to check whether a context is valid
-   * @param contextCfgItemName
+  /**
+   * Method to check whether a context is valid.
+   * @param contextCfgItemName contextCfgItemName.
    * @return true/false
    */
   public static boolean isContextValid(String contextCfgItemName) {
@@ -224,9 +226,9 @@ public class LocalDirAllocator {
   }
   
   /**
-   * Removes the context from the context config items
+   * Removes the context from the context config items.
    * 
-   * @param contextCfgItemName
+   * @param contextCfgItemName contextCfgItemName.
    */
   @Deprecated
   @InterfaceAudience.LimitedPrivate({"MapReduce"})
@@ -236,8 +238,9 @@ public class LocalDirAllocator {
     }
   }
     
-  /** We search through all the configured dirs for the file's existence
-   *  and return true when we find  
+  /**
+   *  We search through all the configured dirs for the file's existence
+   *  and return true if the file is found.
    *  @param pathStr the requested file (this will be searched)
    *  @param conf the Configuration object
    *  @return true if files exist. false otherwise

+ 5 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java

@@ -71,7 +71,11 @@ public class LocalFileSystem extends ChecksumFileSystem {
     super(rawLocalFileSystem);
   }
     
-  /** Convert a path to a File. */
+  /**
+   * Convert a path to a File.
+   * @param path the path.
+   * @return file.
+   */
   public File pathToFile(Path path) {
     return ((RawLocalFileSystem)fs).pathToFile(path);
   }

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java

@@ -28,7 +28,13 @@ public class MD5MD5CRC32CastagnoliFileChecksum extends MD5MD5CRC32FileChecksum {
     this(0, 0, null);
   }
 
-  /** Create a MD5FileChecksum */
+  /**
+   * Create a MD5FileChecksum.
+   *
+   * @param bytesPerCRC bytesPerCRC.
+   * @param crcPerBlock crcPerBlock.
+   * @param md5 md5.
+   */
   public MD5MD5CRC32CastagnoliFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
     super(bytesPerCRC, crcPerBlock, md5);
   }

+ 11 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java

@@ -44,7 +44,13 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
     this(0, 0, null);
   }
 
-  /** Create a MD5FileChecksum */
+  /**
+   * Create a MD5FileChecksum.
+   *
+   * @param bytesPerCRC bytesPerCRC.
+   * @param crcPerBlock crcPerBlock.
+   * @param md5 md5.
+   */
   public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
     this.bytesPerCRC = bytesPerCRC;
     this.crcPerBlock = crcPerBlock;
@@ -76,7 +82,10 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
     return WritableUtils.toByteArray(this);
   }
 
-  /** returns the CRC type */
+  /**
+   * returns the CRC type.
+   * @return data check sum type.
+   */
   public DataChecksum.Type getCrcType() {
     // default to the one that is understood by all releases.
     return DataChecksum.Type.CRC32;

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java

@@ -28,7 +28,13 @@ public class MD5MD5CRC32GzipFileChecksum extends MD5MD5CRC32FileChecksum {
     this(0, 0, null);
   }
 
-  /** Create a MD5FileChecksum */
+  /**
+   * Create a MD5FileChecksum.
+   *
+   * @param bytesPerCRC bytesPerCRC.
+   * @param crcPerBlock crcPerBlock.
+   * @param md5 md5.
+   */
   public MD5MD5CRC32GzipFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
     super(bytesPerCRC, crcPerBlock, md5);
   }

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java

@@ -31,10 +31,11 @@ import org.apache.hadoop.fs.statistics.IOStatisticsSource;
 /**
  * MultipartUploader is an interface for copying files multipart and across
  * multiple nodes.
- * <p></p>
+ * <p>
  * The interface extends {@link IOStatisticsSource} so that there is no
  * need to cast an instance to see if is a source of statistics.
  * However, implementations MAY return null for their actual statistics.
+ * </p>
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable

+ 17 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java

@@ -25,34 +25,43 @@ import org.apache.hadoop.fs.permission.FsPermission;
 
 /**
  * Builder interface for Multipart readers.
- * @param <S>
- * @param <B>
+ * @param <S> MultipartUploader Generic Type.
+ * @param <B> MultipartUploaderBuilder Generic Type.
  */
 public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends MultipartUploaderBuilder<S, B>>
     extends FSBuilder<S, B> {
 
   /**
    * Set permission for the file.
+   * @param perm permission.
+   * @return B Generics Type.
    */
   B permission(@Nonnull FsPermission perm);
 
   /**
    * Set the size of the buffer to be used.
+   * @param bufSize buffer size.
+   * @return B Generics Type.
    */
   B bufferSize(int bufSize);
 
   /**
    * Set replication factor.
+   * @param replica replica.
+   * @return B Generics Type.
    */
   B replication(short replica);
 
   /**
    * Set block size.
+   * @param blkSize blkSize.
+   * @return B Generics Type.
    */
   B blockSize(long blkSize);
 
   /**
    * Create an FSDataOutputStream at the specified path.
+   * @return B Generics Type.
    */
   B create();
 
@@ -60,16 +69,21 @@ public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends
    * Set to true to overwrite the existing file.
    * Set it to false, an exception will be thrown when calling {@link #build()}
    * if the file exists.
+   * @param overwrite overwrite.
+   * @return B Generics Type.
    */
   B overwrite(boolean overwrite);
 
   /**
    * Append to an existing file (optional operation).
+   * @return B Generics Type.
    */
   B append();
 
   /**
    * Set checksum opt.
+   * @param chksumOpt chk sum opt.
+   * @return B Generics Type.
    */
   B checksumOpt(@Nonnull Options.ChecksumOpt chksumOpt);
 
@@ -78,6 +92,7 @@ public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends
    *
    * @throws IllegalArgumentException if the parameters are not valid.
    * @throws IOException on errors when file system creates or appends the file.
+   * @return S Generics Type.
    */
   S build() throws IllegalArgumentException, IOException;
 }

+ 6 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java

@@ -280,7 +280,9 @@ public final class Options {
     }
 
     /**
-     * Create a ChecksumOpts that disables checksum
+     * Create a ChecksumOpts that disables checksum.
+     *
+     * @return ChecksumOpt.
      */
     public static ChecksumOpt createDisabled() {
       return new ChecksumOpt(DataChecksum.Type.NULL, -1);
@@ -295,6 +297,7 @@ public final class Options {
      * @param userOpt User-specified checksum option. Ignored if null.
      * @param userBytesPerChecksum User-specified bytesPerChecksum
      *                Ignored if {@literal <} 0.
+     * @return ChecksumOpt.
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt, 
         ChecksumOpt userOpt, int userBytesPerChecksum) {
@@ -330,6 +333,8 @@ public final class Options {
      *
      * @param defaultOpt Default checksum option
      * @param userOpt User-specified checksum option
+     *
+     * @return ChecksumOpt.
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt,
         ChecksumOpt userOpt) {

+ 52 - 12
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java

@@ -105,7 +105,9 @@ public class QuotaUsage {
   // Make it protected for the deprecated ContentSummary constructor.
   protected QuotaUsage() { }
 
-  /** Build the instance based on the builder. */
+  /** Build the instance based on the builder.
+   * @param builder builder.
+   */
   protected QuotaUsage(Builder builder) {
     this.fileAndDirectoryCount = builder.fileAndDirectoryCount;
     this.quota = builder.quota;
@@ -127,37 +129,67 @@ public class QuotaUsage {
     this.spaceQuota = spaceQuota;
   }
 
-  /** Return the directory count. */
+  /**
+   * Return the directory count.
+   *
+   * @return file and directory count.
+   */
   public long getFileAndDirectoryCount() {
     return fileAndDirectoryCount;
   }
 
-  /** Return the directory quota. */
+  /**
+   * Return the directory quota.
+   *
+   * @return quota.
+   */
   public long getQuota() {
     return quota;
   }
 
-  /** Return (disk) space consumed. */
+  /**
+   * Return (disk) space consumed.
+   *
+   * @return space consumed.
+   */
   public long getSpaceConsumed() {
     return spaceConsumed;
   }
 
-  /** Return (disk) space quota. */
+  /**
+   * Return (disk) space quota.
+   *
+   * @return space quota.
+   */
   public long getSpaceQuota() {
     return spaceQuota;
   }
 
-  /** Return storage type quota. */
+  /**
+   * Return storage type quota.
+   *
+   * @param type storage type.
+   * @return type quota.
+   */
   public long getTypeQuota(StorageType type) {
     return (typeQuota != null) ? typeQuota[type.ordinal()] : -1L;
   }
 
-  /** Return storage type consumed. */
+  /**
+   * Return storage type consumed.
+   *
+   * @param type storage type.
+   * @return type consumed.
+   */
   public long getTypeConsumed(StorageType type) {
     return (typeConsumed != null) ? typeConsumed[type.ordinal()] : 0L;
   }
 
-  /** Return true if any storage type quota has been set. */
+  /**
+   * Return true if any storage type quota has been set.
+   *
+   * @return true if any storage type quota has been set; false otherwise.
+   */
   public boolean isTypeQuotaSet() {
     if (typeQuota != null) {
       for (StorageType t : StorageType.getTypesSupportingQuota()) {
@@ -169,7 +201,12 @@ public class QuotaUsage {
     return false;
   }
 
-  /** Return true if any storage type consumption information is available. */
+  /**
+   * Return true if any storage type consumption information is available.
+   *
+   * @return true if any storage type consumption information
+   * is available, false otherwise.
+   */
   public boolean isTypeConsumedAvailable() {
     if (typeConsumed != null) {
       for (StorageType t : StorageType.getTypesSupportingQuota()) {
@@ -271,12 +308,15 @@ public class QuotaUsage {
     return toString(hOption, false, null);
   }
 
-  /** Return the string representation of the object in the output format.
+  /**
+   * Return the string representation of the object in the output format.
    * if hOption is false file sizes are returned in bytes
    * if hOption is true file sizes are returned in human readable
    *
    * @param hOption a flag indicating if human readable output if to be used
-   * @return the string representation of the object
+   * @param tOption type option.
+   * @param types storage types.
+   * @return the string representation of the object.
    */
   public String toString(boolean hOption,
       boolean tOption, List<StorageType> types) {
@@ -328,7 +368,7 @@ public class QuotaUsage {
   /**
    * return the header of with the StorageTypes.
    *
-   * @param storageTypes
+   * @param storageTypes storage types.
    * @return storage header string
    */
   public static String getStorageTypeHeader(List<StorageType> storageTypes) {

+ 6 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -100,7 +100,12 @@ public class RawLocalFileSystem extends FileSystem {
     }
   }
   
-  /** Convert a path to a File. */
+  /**
+   * Convert a path to a File.
+   *
+   * @param path the path.
+   * @return file.
+   */
   public File pathToFile(Path path) {
     checkPath(path);
     if (!path.isAbsolute()) {

+ 12 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java

@@ -32,17 +32,27 @@ public interface Seekable {
    * Seek to the given offset from the start of the file.
    * The next read() will be from that location.  Can't
    * seek past the end of the file.
+   *
+   * @param pos offset from the start of the file.
+   * @throws IOException raised on errors performing I/O.
    */
   void seek(long pos) throws IOException;
-  
+
   /**
    * Return the current offset from the start of the file
+   *
+   * @return offset from the start of the file.
+   * @throws IOException raised on errors performing I/O.
    */
   long getPos() throws IOException;
 
   /**
-   * Seeks a different copy of the data.  Returns true if 
+   * Seeks a different copy of the data.  Returns true if
    * found a new source, false otherwise.
+   *
+   * @param targetPos target position.
+   * @return true if found a new source, false otherwise.
+   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.Private
   boolean seekToNewSource(long targetPos) throws IOException;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java

@@ -73,8 +73,8 @@ public class Stat extends Shell {
   }
 
   /**
-   * Whether Stat is supported on the current platform
-   * @return
+   * Whether Stat is supported on the current platform.
+   * @return true if Stat is supported on the current platform, false otherwise.
    */
   public static boolean isAvailable() {
     if (Shell.LINUX || Shell.FREEBSD || Shell.MAC) {

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java

@@ -127,6 +127,7 @@ public abstract class StorageStatistics {
 
   /**
    * Get the name of this StorageStatistics object.
+   * @return name of this StorageStatistics object
    */
   public String getName() {
     return name;
@@ -145,12 +146,15 @@ public abstract class StorageStatistics {
    *
    * The values returned will depend on the type of FileSystem or FileContext
    * object.  The values do not necessarily reflect a snapshot in time.
+   *
+   * @return LongStatistic Iterator.
    */
   public abstract Iterator<LongStatistic> getLongStatistics();
 
   /**
    * Get the value of a statistic.
    *
+   * @param key key.
    * @return         null if the statistic is not being tracked or is not a
    *                 long statistic. The value of the statistic, otherwise.
    */
@@ -159,6 +163,7 @@ public abstract class StorageStatistics {
   /**
    * Return true if a statistic is being tracked.
    *
+   * @param key key.
    * @return         True only if the statistic is being tracked.
    */
   public abstract boolean isTracked(String key);

+ 36 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java

@@ -43,6 +43,7 @@ public class Trash extends Configured {
   /** 
    * Construct a trash can accessor.
    * @param conf a Configuration
+   * @throws IOException raised on errors performing I/O.
    */
   public Trash(Configuration conf) throws IOException {
     this(FileSystem.get(conf), conf);
@@ -52,6 +53,7 @@ public class Trash extends Configured {
    * Construct a trash can accessor for the FileSystem provided.
    * @param fs the FileSystem
    * @param conf a Configuration
+   * @throws IOException raised on errors performing I/O.
    */
   public Trash(FileSystem fs, Configuration conf) throws IOException {
     super(conf);
@@ -97,47 +99,74 @@ public class Trash extends Configured {
   }
   
   /**
-   * Returns whether the trash is enabled for this filesystem
+   * Returns whether the trash is enabled for this filesystem.
+   *
+   * @return true if the trash is enabled, false otherwise.
    */
   public boolean isEnabled() {
     return trashPolicy.isEnabled();
   }
 
   /** Move a file or directory to the current trash directory.
+   *
+   * @param path the path.
    * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException raised on errors performing I/O.
    */ 
   public boolean moveToTrash(Path path) throws IOException {
     return trashPolicy.moveToTrash(path);
   }
 
-  /** Create a trash checkpoint. */
+  /**
+   * Create a trash checkpoint.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void checkpoint() throws IOException {
     trashPolicy.createCheckpoint();
   }
 
-  /** Delete old checkpoint(s). */
+  /**
+   * Delete old checkpoint(s).
+   * @throws IOException raised on errors performing I/O.
+   */
   public void expunge() throws IOException {
     trashPolicy.deleteCheckpoint();
   }
 
-  /** Delete all trash immediately. */
+  /**
+   * Delete all trash immediately.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void expungeImmediately() throws IOException {
     trashPolicy.createCheckpoint();
     trashPolicy.deleteCheckpointsImmediately();
   }
 
-  /** get the current working directory */
+  /**
+   * get the current working directory.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @return Trash Dir.
+   */
   Path getCurrentTrashDir() throws IOException {
     return trashPolicy.getCurrentTrashDir();
   }
 
-  /** get the configured trash policy */
+  /**
+   * get the configured trash policy.
+   *
+   * @return TrashPolicy.
+   */
   TrashPolicy getTrashPolicy() {
     return trashPolicy;
   }
 
-  /** Return a {@link Runnable} that periodically empties the trash of all
+  /**
+   * Return a {@link Runnable} that periodically empties the trash of all
    * users, intended to be run by the superuser.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @return Runnable.
    */
   public Runnable getEmptier() throws IOException {
     return trashPolicy.getEmptier();

+ 14 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java

@@ -60,27 +60,34 @@ public abstract class TrashPolicy extends Configured {
 
   /**
    * Returns whether the Trash Policy is enabled for this filesystem.
+   *
+   * @return true if the trash policy is enabled, false otherwise.
    */
   public abstract boolean isEnabled();
 
   /** 
    * Move a file or directory to the current trash directory.
+   * @param path the path.
    * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException raised on errors performing I/O.
    */ 
   public abstract boolean moveToTrash(Path path) throws IOException;
 
   /** 
-   * Create a trash checkpoint. 
+   * Create a trash checkpoint.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void createCheckpoint() throws IOException;
 
   /** 
    * Delete old trash checkpoint(s).
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void deleteCheckpoint() throws IOException;
 
   /**
    * Delete all checkpoints immediately, ie empty trash.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void deleteCheckpointsImmediately() throws IOException;
 
@@ -94,6 +101,8 @@ public abstract class TrashPolicy extends Configured {
    * TrashPolicy#getCurrentTrashDir(Path path).
    * It returns the trash location correctly for the path specified no matter
    * the path is in encryption zone or not.
+   *
+   * @return the path.
    */
   public abstract Path getCurrentTrashDir();
 
@@ -102,7 +111,7 @@ public abstract class TrashPolicy extends Configured {
    * Policy
    * @param path path to be deleted
    * @return current trash directory for the path to be deleted
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getCurrentTrashDir(Path path) throws IOException {
     throw new UnsupportedOperationException();
@@ -111,6 +120,9 @@ public abstract class TrashPolicy extends Configured {
   /** 
    * Return a {@link Runnable} that periodically empties the trash of all
    * users, intended to be run by the superuser.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @return Runnable.
    */
   public abstract Runnable getEmptier() throws IOException;
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java

@@ -67,7 +67,7 @@ public enum XAttrCodec {
    * the given string is treated as text. 
    * @param value string representation of the value.
    * @return byte[] the value
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static byte[] decodeValue(String value) throws IOException {
     byte[] result = null;
@@ -102,9 +102,9 @@ public enum XAttrCodec {
    * while strings encoded as hexadecimal and base64 are prefixed with 
    * 0x and 0s, respectively.
    * @param value byte[] value
-   * @param encoding
+   * @param encoding encoding.
    * @return String string representation of value
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static String encodeValue(byte[] value, XAttrCodec encoding) 
       throws IOException {

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java

@@ -340,12 +340,14 @@ public abstract class
 
   /**
    * Get all the keys that are set as mandatory keys.
+   * @return mandatory keys.
    */
   public Set<String> getMandatoryKeys() {
     return Collections.unmodifiableSet(mandatoryKeys);
   }
   /**
    * Get all the keys that are set as optional keys.
+   * @return optional keys.
    */
   public Set<String> getOptionalKeys() {
     return Collections.unmodifiableSet(optionalKeys);

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java

@@ -127,7 +127,7 @@ public abstract class AbstractMultipartUploader implements MultipartUploader {
    * {@inheritDoc}.
    * @param path path to abort uploads under.
    * @return a future to -1.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public CompletableFuture<Integer> abortUploadsUnderPath(Path path)
       throws IOException {

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java

@@ -126,6 +126,9 @@ public abstract class FutureDataInputStreamBuilderImpl
 
   /**
    * Set the size of the buffer to be used.
+   *
+   * @param bufSize buffer size.
+   * @return FutureDataInputStreamBuilder.
    */
   public FutureDataInputStreamBuilder bufferSize(int bufSize) {
     bufferSize = bufSize;
@@ -137,6 +140,8 @@ public abstract class FutureDataInputStreamBuilderImpl
    * This must be used after the constructor has been invoked to create
    * the actual builder: it allows for subclasses to do things after
    * construction.
+   *
+   * @return FutureDataInputStreamBuilder.
    */
   public FutureDataInputStreamBuilder builder() {
     return getThisBuilder();

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java

@@ -75,6 +75,8 @@ public final class FutureIOSupport {
    * See {@link FutureIO#awaitFuture(Future, long, TimeUnit)}.
    * @param future future to evaluate
    * @param <T> type of the result.
+   * @param timeout timeout.
+   * @param unit unit.
    * @return the result, if all went well.
    * @throws InterruptedIOException future was interrupted
    * @throws IOException if something went wrong

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java

@@ -88,6 +88,9 @@ public abstract class MultipartUploaderBuilderImpl
 
   /**
    * Constructor.
+   *
+   * @param fileSystem fileSystem.
+   * @param p path.
    */
   protected MultipartUploaderBuilderImpl(@Nonnull FileSystem fileSystem,
       @Nonnull Path p) {

+ 3 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java

@@ -185,7 +185,8 @@ public class AclStatus {
 
     /**
      * Sets the permission for the file.
-     * @param permission
+     * @param permission permission.
+     * @return Builder.
      */
     public Builder setPermission(FsPermission permission) {
       this.permission = permission;
@@ -224,6 +225,7 @@ public class AclStatus {
   /**
    * Get the effective permission for the AclEntry
    * @param entry AclEntry to get the effective action
+   * @return FsAction.
    */
   public FsAction getEffectivePermission(AclEntry entry) {
     return getEffectivePermission(entry, permission);

+ 16 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java

@@ -48,7 +48,8 @@ public enum FsAction {
 
   /**
    * Return true if this action implies that action.
-   * @param that
+   * @param that FsAction that.
+   * @return true if this action implies that action, false otherwise.
    */
   public boolean implies(FsAction that) {
     if (that != null) {
@@ -57,15 +58,26 @@ public enum FsAction {
     return false;
   }
 
-  /** AND operation. */
+  /**
+   * AND operation.
+   * @param that FsAction that.
+   * @return FsAction.
+   */
   public FsAction and(FsAction that) {
     return vals[ordinal() & that.ordinal()];
   }
-  /** OR operation. */
+  /**
+   * OR operation.
+   * @param that FsAction that.
+   * @return FsAction.
+   */
   public FsAction or(FsAction that) {
     return vals[ordinal() | that.ordinal()];
   }
-  /** NOT operation. */
+  /**
+   * NOT operation.
+   * @return FsAction.
+   */
   public FsAction not() {
     return vals[7 - ordinal()];
   }

+ 8 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java

@@ -35,7 +35,10 @@ public final class FsCreateModes extends FsPermission {
   /**
    * Create from unmasked mode and umask.
    *
-   * If the mode is already an FsCreateModes object, return it.
+   * @param mode mode.
+   * @param umask umask.
+   * @return If the mode is already
+   * an FsCreateModes object, return it.
    */
   public static FsPermission applyUMask(FsPermission mode,
                                         FsPermission umask) {
@@ -47,6 +50,10 @@ public final class FsCreateModes extends FsPermission {
 
   /**
    * Create from masked and unmasked modes.
+   *
+   * @param masked masked.
+   * @param unmasked unmasked.
+   * @return FsCreateModes.
    */
   public static FsCreateModes create(FsPermission masked,
                                      FsPermission unmasked) {

+ 47 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java

@@ -56,7 +56,11 @@ public class FsPermission implements Writable, Serializable,
   /** Maximum acceptable length of a permission string to parse */
   public static final int MAX_PERMISSION_LENGTH = 10;
 
-  /** Create an immutable {@link FsPermission} object. */
+  /**
+   * Create an immutable {@link FsPermission} object.
+   * @param permission permission.
+   * @return FsPermission.
+   */
   public static FsPermission createImmutable(short permission) {
     return new ImmutableFsPermission(permission);
   }
@@ -85,7 +89,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Construct by the given mode.
-   * @param mode
+   * @param mode mode.
    * @see #toShort()
    */
   public FsPermission(short mode) { fromShort(mode); }
@@ -145,13 +149,19 @@ public class FsPermission implements Writable, Serializable,
     this(new RawParser(mode).getPermission());
   }
 
-  /** Return user {@link FsAction}. */
+  /**
+   * @return Return user {@link FsAction}.
+   */
   public FsAction getUserAction() {return useraction;}
 
-  /** Return group {@link FsAction}. */
+  /**
+   * @return Return group {@link FsAction}.
+   */
   public FsAction getGroupAction() {return groupaction;}
 
-  /** Return other {@link FsAction}. */
+  /**
+   * @return Return other {@link FsAction}.
+   */
   public FsAction getOtherAction() {return otheraction;}
 
   private void set(FsAction u, FsAction g, FsAction o, boolean sb) {
@@ -180,6 +190,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get masked permission if exists.
+   * @return masked.
    */
   public FsPermission getMasked() {
     return null;
@@ -187,6 +198,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get unmasked permission if exists.
+   * @return unmasked.
    */
   public FsPermission getUnmasked() {
     return null;
@@ -194,6 +206,10 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Create and initialize a {@link FsPermission} from {@link DataInput}.
+   *
+   * @param in data input.
+   * @throws IOException raised on errors performing I/O.
+   * @return FsPermission.
    */
   public static FsPermission read(DataInput in) throws IOException {
     FsPermission p = new FsPermission();
@@ -203,6 +219,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Encode the object to a short.
+   * @return object to a short.
    */
   public short toShort() {
     int s =  (stickyBit ? 1 << 9 : 0)     |
@@ -301,6 +318,9 @@ public class FsPermission implements Writable, Serializable,
    * '-' sets bits in the mask.
    * 
    * Octal umask, the specified bits are set in the file mode creation mask.
+   *
+   * @param conf configuration.
+   * @return FsPermission UMask.
    */
   public static FsPermission getUMask(Configuration conf) {
     int umask = DEFAULT_UMASK;
@@ -346,7 +366,11 @@ public class FsPermission implements Writable, Serializable,
   }
 
   /**
-   * Returns true if the file is encrypted or directory is in an encryption zone
+   * Returns true if the file is encrypted or directory is in an encryption zone.
+   *
+   * @return true if the file is encrypted or the directory
+   * is in an encryption zone, false otherwise.
+   *
    * @deprecated Get encryption bit from the
    * {@link org.apache.hadoop.fs.FileStatus} object.
    */
@@ -357,6 +381,9 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Returns true if the file or directory is erasure coded.
+   *
+   * @return true if the file or directory is
+   * erasure coded, false otherwise.
    * @deprecated Get ec bit from the {@link org.apache.hadoop.fs.FileStatus}
    * object.
    */
@@ -365,7 +392,11 @@ public class FsPermission implements Writable, Serializable,
     return false;
   }
 
-  /** Set the user file creation mask (umask) */
+  /**
+   * Set the user file creation mask (umask)
+   * @param conf configuration.
+   * @param umask umask.
+   */
   public static void setUMask(Configuration conf, FsPermission umask) {
     conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort()));
   }
@@ -379,6 +410,8 @@ public class FsPermission implements Writable, Serializable,
    * {@link FsPermission#getDirDefault()} for directory, and use
    * {@link FsPermission#getFileDefault()} for file.
    * This method is kept for compatibility.
+   *
+   * @return Default FsPermission.
    */
   public static FsPermission getDefault() {
     return new FsPermission((short)00777);
@@ -386,6 +419,8 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get the default permission for directory.
+   *
+   * @return DirDefault FsPermission.
    */
   public static FsPermission getDirDefault() {
     return new FsPermission((short)00777);
@@ -393,6 +428,8 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get the default permission for file.
+   *
+   * @return FileDefault FsPermission.
    */
   public static FsPermission getFileDefault() {
     return new FsPermission((short)00666);
@@ -400,6 +437,8 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get the default permission for cache pools.
+   *
+   * @return CachePoolDefault FsPermission.
    */
   public static FsPermission getCachePoolDefault() {
     return new FsPermission((short)00755);
@@ -408,6 +447,7 @@ public class FsPermission implements Writable, Serializable,
   /**
    * Create a FsPermission from a Unix symbolic permission string
    * @param unixSymbolicPermission e.g. "-rw-rw-rw-"
+   * @return FsPermission.
    */
   public static FsPermission valueOf(String unixSymbolicPermission) {
     if (unixSymbolicPermission == null) {

+ 34 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java

@@ -39,7 +39,13 @@ public class PermissionStatus implements Writable {
     WritableFactories.setFactory(PermissionStatus.class, FACTORY);
   }
 
-  /** Create an immutable {@link PermissionStatus} object. */
+  /**
+   * Create an immutable {@link PermissionStatus} object.
+   * @param user user.
+   * @param group group.
+   * @param permission permission.
+   * @return PermissionStatus.
+   */
   public static PermissionStatus createImmutable(
       String user, String group, FsPermission permission) {
     return new PermissionStatus(user, group, permission) {
@@ -56,20 +62,35 @@ public class PermissionStatus implements Writable {
 
   private PermissionStatus() {}
 
-  /** Constructor */
+  /**
+   * Constructor.
+   *
+   * @param user user.
+   * @param group group.
+   * @param permission permission.
+   */
   public PermissionStatus(String user, String group, FsPermission permission) {
     username = user;
     groupname = group;
     this.permission = permission;
   }
 
-  /** Return user name */
+  /**
+   * Return user name.
+   * @return user name.
+   */
   public String getUserName() {return username;}
 
-  /** Return group name */
+  /**
+   * Return group name.
+   * @return group name.
+   */
   public String getGroupName() {return groupname;}
 
-  /** Return permission */
+  /**
+   * Return permission.
+   * @return FsPermission.
+   */
   public FsPermission getPermission() {return permission;}
 
   @Override
@@ -86,6 +107,9 @@ public class PermissionStatus implements Writable {
 
   /**
    * Create and initialize a {@link PermissionStatus} from {@link DataInput}.
+   * @param in data input.
+   * @throws IOException raised on errors performing I/O.
+   * @return PermissionStatus.
    */
   public static PermissionStatus read(DataInput in) throws IOException {
     PermissionStatus p = new PermissionStatus();
@@ -95,6 +119,11 @@ public class PermissionStatus implements Writable {
 
   /**
    * Serialize a {@link PermissionStatus} from its base components.
+   * @param out out.
+   * @param username username.
+   * @param groupname groupname.
+   * @param permission FsPermission.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void write(DataOutput out,
                            String username, 

+ 18 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java

@@ -77,7 +77,11 @@ abstract public class Command extends Configured {
     err = System.err;
   }
   
-  /** Constructor */
+  /**
+   * Constructor.
+   *
+   * @param conf configuration.
+   */
   protected Command(Configuration conf) {
     super(conf);
   }
@@ -109,7 +113,7 @@ abstract public class Command extends Configured {
    * Execute the command on the input path data. Commands can override to make
    * use of the resolved filesystem.
    * @param pathData The input path with resolved filesystem
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void run(PathData pathData) throws IOException {
     run(pathData.path);
@@ -136,11 +140,19 @@ abstract public class Command extends Configured {
     return exitCode;
   }
 
-  /** sets the command factory for later use */
+  /**
+   * sets the command factory for later use.
+   * @param factory factory.
+   */
   public void setCommandFactory(CommandFactory factory) {
     this.commandFactory = factory;
   }
-  /** retrieves the command factory */
+
+  /**
+   * retrieves the command factory.
+   *
+   * @return command factory.
+   */
   protected CommandFactory getCommandFactory() {
     return this.commandFactory;
   }
@@ -201,7 +213,7 @@ abstract public class Command extends Configured {
    * IllegalArgumentException is thrown, the FsShell object will print the
    * short usage of the command.
    * @param args the command line arguments
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void processOptions(LinkedList<String> args) throws IOException {}
 
@@ -211,7 +223,7 @@ abstract public class Command extends Configured {
    * {@link #expandArguments(LinkedList)} and pass the resulting list to
    * {@link #processArguments(LinkedList)} 
    * @param args the list of argument strings
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void processRawArguments(LinkedList<String> args)
   throws IOException {

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java

@@ -119,6 +119,8 @@ abstract class CommandWithDestination extends FsCommand {
    * owner, group and permission information of the source
    * file will be preserved as far as target {@link FileSystem}
    * implementation allows.
+   *
+   * @param preserve preserve.
    */
   protected void setPreserve(boolean preserve) {
     if (preserve) {
@@ -175,6 +177,7 @@ abstract class CommandWithDestination extends FsCommand {
    *  The last arg is expected to be a local path, if only one argument is
    *  given then the destination will be the current directory 
    *  @param args is the list of arguments
+   * @throws IOException raised on errors performing I/O.
    */
   protected void getLocalDestination(LinkedList<String> args)
   throws IOException {

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

@@ -610,10 +610,11 @@ public class PathData implements Comparable<PathData> {
 
   /**
    * Open a file for sequential IO.
-   * <p></p>
+   * <p>
    * This uses FileSystem.openFile() to request sequential IO;
    * the file status is also passed in.
    * Filesystems may use to optimize their IO.
+   * </p>
    * @return an input stream
    * @throws IOException failure
    */

+ 16 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java

@@ -38,12 +38,18 @@ public abstract class BaseExpression implements Expression, Configurable {
   private String[] usage = { "Not yet implemented" };
   private String[] help = { "Not yet implemented" };
 
-  /** Sets the usage text for this {@link Expression} */
+  /**
+   * Sets the usage text for this {@link Expression} .
+   * @param usage usage array.
+   */
   protected void setUsage(String[] usage) {
     this.usage = usage;
   }
 
-  /** Sets the help text for this {@link Expression} */
+  /**
+   * Sets the help text for this {@link Expression} .
+   * @param help help.
+   */
   protected void setHelp(String[] help) {
     this.help = help;
   }
@@ -92,7 +98,10 @@ public abstract class BaseExpression implements Expression, Configurable {
   /** Children of this expression. */
   private LinkedList<Expression> children = new LinkedList<Expression>();
 
-  /** Return the options to be used by this expression. */
+  /**
+   * Return the options to be used by this expression.
+   * @return options.
+   */
   protected FindOptions getOptions() {
     return (this.options == null) ? new FindOptions() : this.options;
   }
@@ -265,6 +274,7 @@ public abstract class BaseExpression implements Expression, Configurable {
    * @param depth
    *          current depth in the process directories
    * @return FileStatus
+   * @throws IOException raised on errors performing I/O.
    */
   protected FileStatus getFileStatus(PathData item, int depth)
       throws IOException {
@@ -285,6 +295,8 @@ public abstract class BaseExpression implements Expression, Configurable {
    * @param item
    *          PathData
    * @return Path
+   *
+   * @throws IOException raised on errors performing I/O.
    */
   protected Path getPath(PathData item) throws IOException {
     return item.path;
@@ -295,6 +307,7 @@ public abstract class BaseExpression implements Expression, Configurable {
    *
    * @param item PathData
    * @return FileSystem
+   * @throws IOException raised on errors performing I/O.
    */
   protected FileSystem getFileSystem(PathData item) throws IOException {
     return item.fs;

+ 12 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java

@@ -30,13 +30,15 @@ public interface Expression {
   /**
    * Set the options for this expression, called once before processing any
    * items.
+   * @param options options.
+   * @throws IOException raised on errors performing I/O.
    */
   public void setOptions(FindOptions options) throws IOException;
 
   /**
    * Prepares the expression for execution, called once after setting options
    * and before processing any options.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void prepare() throws IOException;
 
@@ -46,13 +48,14 @@ public interface Expression {
    * @param item {@link PathData} item to be processed
    * @param depth distance of the item from the command line argument
    * @return {@link Result} of applying the expression to the item
+   * @throws IOException raised on errors performing I/O.
    */
   public Result apply(PathData item, int depth) throws IOException;
 
   /**
    * Finishes the expression, called once after processing all items.
    *
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void finish() throws IOException;
 
@@ -76,15 +79,21 @@ public interface Expression {
   /**
    * Indicates whether this expression performs an action, i.e. provides output
    * back to the user.
+   * @return true if this expression is an action, false otherwise.
    */
   public boolean isAction();
 
-  /** Identifies the expression as an operator rather than a primary. */
+  /**
+   * Identifies the expression as an operator rather than a primary.
+   * @return true if this expression is an operator, false otherwise.
+   */
   public boolean isOperator();
 
   /**
    * Returns the precedence of this expression
    * (only applicable to operators).
+   *
+   * @return precedence.
    */
   public int getPrecedence();
 

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java

@@ -264,6 +264,7 @@ public class FindOptions {
 
   /**
   * Return the {@link Configuration} used by this instance.
+   * @return configuration.
    */
   public Configuration getConfiguration() {
     return this.configuration;

+ 17 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java

@@ -35,23 +35,36 @@ public final class Result {
     this.descend = recurse;
   }
 
-  /** Should further directories be descended. */
+  /**
+   * Should further directories be descended.
+   * @return true if further directories should be descended, false otherwise.
+   */
   public boolean isDescend() {
     return this.descend;
   }
 
-  /** Should processing continue. */
+  /**
+   * Should processing continue.
+   * @return true if processing should continue, false otherwise.
+   */
   public boolean isPass() {
     return this.success;
   }
 
-  /** Returns the combination of this and another result. */
+  /**
+   * Returns the combination of this and another result.
+   * @param other other.
+   * @return result.
+   */
   public Result combine(Result other) {
     return new Result(this.isPass() && other.isPass(), this.isDescend()
         && other.isDescend());
   }
 
-  /** Negate this result. */
+  /**
+   * Negate this result.
+   * @return Result.
+   */
   public Result negate() {
     return new Result(!this.isPass(), this.isDescend());
   }

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java

@@ -53,7 +53,7 @@ import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.snapshotM
  * deserialized. If for some reason this is required, use
  * {@link #requiredSerializationClasses()} to get the list of classes
  * used when deserializing instances of this object.
- * <p>
+ * </p>
  * <p>
  * It is annotated for correct serializations with jackson2.
  * </p>
@@ -238,6 +238,8 @@ public final class IOStatisticsSnapshot
   /**
    * Serialize by converting each map to a TreeMap, and saving that
    * to the stream.
+   * @param s ObjectOutputStream.
+   * @throws IOException raised on errors performing I/O.
    */
   private synchronized void writeObject(ObjectOutputStream s)
       throws IOException {
@@ -253,6 +255,10 @@ public final class IOStatisticsSnapshot
   /**
    * Deserialize by loading each TreeMap, and building concurrent
    * hash maps from them.
+   *
+   * @param s ObjectInputStream.
+   * @throws IOException raised on errors performing I/O.
+   * @throws ClassNotFoundException class not found exception
    */
   private void readObject(final ObjectInputStream s)
       throws IOException, ClassNotFoundException {

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java

@@ -71,6 +71,7 @@ public final class IOStatisticsSupport {
   * Returns null if the source isn't of the right type
    * or the return value of
    * {@link IOStatisticsSource#getIOStatistics()} was null.
+   * @param source source.
    * @return an IOStatistics instance or null
    */
 

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java

@@ -207,6 +207,7 @@ public final class MeanStatistic implements Serializable, Cloneable {
   /**
    * Add another MeanStatistic.
    * @param other other value
+   * @return mean statistic.
    */
   public synchronized MeanStatistic add(final MeanStatistic other) {
     if (other.isEmpty()) {

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java

@@ -141,6 +141,7 @@ public final class IOStatisticsBinding {
   /**
    * Convert entry values to the string format used in logging.
    *
+   * @param <E> type of values.
    * @param name statistic name
    * @param value stat value
    * @return formatted string
@@ -178,6 +179,8 @@ public final class IOStatisticsBinding {
   /**
    * A passthrough copy operation suitable for immutable
    * types, including numbers.
+   *
+   * @param <E> type of values.
    * @param src source object
    * @return the source object
    */
@@ -437,6 +440,7 @@ public final class IOStatisticsBinding {
    * @param input input callable.
    * @param <B> return type.
    * @return the result of the operation.
+   * @throws IOException raised on errors performing I/O.
    */
   public static <B> B trackDuration(
       DurationTrackerFactory factory,

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java

@@ -107,6 +107,7 @@ public final class DataBlocks {
    * @param len number of bytes to be written.
    * @throws NullPointerException      for a null buffer
    * @throws IndexOutOfBoundsException if indices are out of range
+   * @throws IOException raised on errors performing I/O.
    */
   public static void validateWriteArgs(byte[] b, int off, int len)
       throws IOException {
@@ -287,6 +288,7 @@ public final class DataBlocks {
      * @param limit      limit of the block.
      * @param statistics stats to work with
      * @return a new block.
+     * @throws IOException raised on errors performing I/O.
      */
     public abstract DataBlock create(long index, int limit,
         BlockUploadStatistics statistics)
@@ -482,6 +484,8 @@ public final class DataBlocks {
 
     /**
      * Inner close logic for subclasses to implement.
+     *
+     * @throws IOException raised on errors performing I/O.
      */
     protected void innerClose() throws IOException {
 

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java

@@ -86,6 +86,8 @@ public final class AuditingFunctions {
    * activates and deactivates the span around the inner one.
    * @param auditSpan audit span
    * @param operation operation
+   * @param <T> Generics Type T.
+   * @param <R> Generics Type R.
    * @return a new invocation.
    */
   public static <T, R> FunctionRaisingIOE<T, R> withinAuditSpan(

+ 35 - 21
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java

@@ -48,7 +48,7 @@ public class ConfigUtil {
   /**
    * Add a link to the config for the specified mount table
    * @param conf - add the link to this conf
-   * @param mountTableName
+   * @param mountTableName mountTable.
    * @param src - the src path name
    * @param target - the target URI link
    */
@@ -71,9 +71,10 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMergeSlash to the config for the specified mount table.
-   * @param conf
-   * @param mountTableName
-   * @param target
+   *
+   * @param conf configuration.
+   * @param mountTableName mountTable.
+   * @param target target.
    */
   public static void addLinkMergeSlash(Configuration conf,
       final String mountTableName, final URI target) {
@@ -83,8 +84,9 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMergeSlash to the config for the default mount table.
-   * @param conf
-   * @param target
+   *
+   * @param conf configuration.
+   * @param target targets.
    */
   public static void addLinkMergeSlash(Configuration conf, final URI target) {
     addLinkMergeSlash(conf, getDefaultMountTableName(conf), target);
@@ -92,9 +94,10 @@ public class ConfigUtil {
 
   /**
    * Add a LinkFallback to the config for the specified mount table.
-   * @param conf
-   * @param mountTableName
-   * @param target
+   *
+   * @param conf configuration.
+   * @param mountTableName mountTable.
+   * @param target targets.
    */
   public static void addLinkFallback(Configuration conf,
       final String mountTableName, final URI target) {
@@ -104,8 +107,9 @@ public class ConfigUtil {
 
   /**
    * Add a LinkFallback to the config for the default mount table.
-   * @param conf
-   * @param target
+   *
+   * @param conf configuration.
+   * @param target targets.
    */
   public static void addLinkFallback(Configuration conf, final URI target) {
     addLinkFallback(conf, getDefaultMountTableName(conf), target);
@@ -113,9 +117,10 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMerge to the config for the specified mount table.
-   * @param conf
-   * @param mountTableName
-   * @param targets
+   *
+   * @param conf configuration.
+   * @param mountTableName mountTable.
+   * @param targets targets.
    */
   public static void addLinkMerge(Configuration conf,
       final String mountTableName, final URI[] targets) {
@@ -125,8 +130,9 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMerge to the config for the default mount table.
-   * @param conf
-   * @param targets
+   *
+   * @param conf configuration.
+   * @param targets targets array.
    */
   public static void addLinkMerge(Configuration conf, final URI[] targets) {
     addLinkMerge(conf, getDefaultMountTableName(conf), targets);
@@ -134,6 +140,12 @@ public class ConfigUtil {
 
   /**
    * Add nfly link to configuration for the given mount table.
+   *
+   * @param conf configuration.
+   * @param mountTableName mount table.
+   * @param src src.
+   * @param settings settings.
+   * @param targets targets.
    */
   public static void addLinkNfly(Configuration conf, String mountTableName,
       String src, String settings, final String targets) {
@@ -144,12 +156,13 @@ public class ConfigUtil {
   }
 
   /**
+   * Add nfly link to configuration for the given mount table.
    *
-   * @param conf
-   * @param mountTableName
-   * @param src
-   * @param settings
-   * @param targets
+   * @param conf configuration.
+   * @param mountTableName mount table.
+   * @param src src.
+   * @param settings settings.
+   * @param targets targets.
    */
   public static void addLinkNfly(Configuration conf, String mountTableName,
       String src, String settings, final URI ... targets) {
@@ -202,6 +215,7 @@ public class ConfigUtil {
    * Add config variable for homedir the specified mount table
    * @param conf - add to this conf
    * @param homedir - the home dir path starting with slash
+   * @param mountTableName - the mount table.
    */
   public static void setHomeDirConf(final Configuration conf,
               final String mountTableName, final String homedir) {

+ 9 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java

@@ -34,6 +34,10 @@ public class FsGetter {
 
   /**
    * Gets new file system instance of given uri.
+   * @param uri uri.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return file system.
    */
   public FileSystem getNewInstance(URI uri, Configuration conf)
       throws IOException {
@@ -42,6 +46,11 @@ public class FsGetter {
 
   /**
    * Gets file system instance of given uri.
+   *
+   * @param uri uri.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return FileSystem.
    */
   public FileSystem get(URI uri, Configuration conf) throws IOException {
     return FileSystem.get(uri, conf);

+ 33 - 20
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java

@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
  * @param <T> is AbstractFileSystem or FileSystem
  *
  * The two main methods are
- * {@link #InodeTree(Configuration, String)} // constructor
+ * {@link #InodeTree(Configuration, String, URI, boolean)} // constructor
  * {@link #resolve(String, boolean)}
  */
 
@@ -325,8 +325,8 @@ public abstract class InodeTree<T> {
 
    * A merge dir link is  a merge (junction) of links to dirs:
    * example : merge of 2 dirs
-   *     /users -> hdfs:nn1//users
-   *     /users -> hdfs:nn2//users
+   *     /users -&gt; hdfs:nn1//users
+   *     /users -&gt; hdfs:nn2//users
    *
    * For a merge, each target is checked to be dir when created but if target
    * is changed later it is then ignored (a dir with null entries)
@@ -364,6 +364,8 @@ public abstract class InodeTree<T> {
     /**
      * Get the target of the link. If a merge link then it returned
      * as "," separated URI list.
+     *
+     * @return the path.
      */
     public Path getTargetLink() {
       StringBuilder result = new StringBuilder(targetDirLinkList[0].toString());
@@ -387,7 +389,7 @@ public abstract class InodeTree<T> {
     /**
      * Get the instance of FileSystem to use, creating one if needed.
      * @return An Initialized instance of T
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public T getTargetFileSystem() throws IOException {
       if (targetFileSystem != null) {
@@ -500,7 +502,7 @@ public abstract class InodeTree<T> {
   /**
    * The user of this class must subclass and implement the following
    * 3 abstract methods.
-   * @throws IOException
+   * @return a function mapping a target URI to the file system instance.
    */
   protected abstract Function<URI, T> initAndGetTargetFs();
 
@@ -591,14 +593,21 @@ public abstract class InodeTree<T> {
   }
 
   /**
-   * Create Inode Tree from the specified mount-table specified in Config
-   * @param config - the mount table keys are prefixed with
-   *       FsConstants.CONFIG_VIEWFS_PREFIX
-   * @param viewName - the name of the mount table - if null use defaultMT name
-   * @throws UnsupportedFileSystemException
-   * @throws URISyntaxException
-   * @throws FileAlreadyExistsException
-   * @throws IOException
+   * Create Inode Tree from the specified mount-table specified in Config.
+   *
+   * @param config the mount table keys are prefixed with
+   *               FsConstants.CONFIG_VIEWFS_PREFIX.
+   * @param viewName the name of the mount table
+   *                 if null use defaultMT name.
+   * @param theUri the URI of the file system.
+   * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts.
+   * @throws UnsupportedFileSystemException file system for <code>uri</code> is
+   *                                        not found.
+   * @throws URISyntaxException if the URI does not have an authority
+   *                            it is badly formed.
+   * @throws FileAlreadyExistsException there is a file at the path specified
+   *                                    or is discovered on one of its ancestors.
+   * @throws IOException raised on errors performing I/O.
    */
   protected InodeTree(final Configuration config, final String viewName,
       final URI theUri, boolean initingUriAsFallbackOnNoMounts)
@@ -872,9 +881,9 @@ public abstract class InodeTree<T> {
   /**
    * Resolve the pathname p relative to root InodeDir.
    * @param p - input path
-   * @param resolveLastComponent
+   * @param resolveLastComponent resolveLastComponent.
    * @return ResolveResult which allows further resolution of the remaining path
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public ResolveResult<T> resolve(final String p, final boolean resolveLastComponent)
       throws IOException {
@@ -996,14 +1005,14 @@ public abstract class InodeTree<T> {
   /**
    * Walk through all regex mount points to see
    * whether the path match any regex expressions.
-   *  E.g. link: ^/user/(?<username>\\w+) => s3://$user.apache.com/_${user}
+   *  E.g. link: ^/user/(?&lt;username&gt;\\w+) =&gt; s3://$user.apache.com/_${user}
    *  srcPath: is /user/hadoop/dir1
    *  resolveLastComponent: true
    *  then return value is s3://hadoop.apache.com/_hadoop
    *
-   * @param srcPath
-   * @param resolveLastComponent
-   * @return
+   * @param srcPath srcPath.
+   * @param resolveLastComponent resolveLastComponent.
+   * @return ResolveResult.
    */
   protected ResolveResult<T> tryResolveInRegexMountpoint(final String srcPath,
       final boolean resolveLastComponent) {
@@ -1021,7 +1030,7 @@ public abstract class InodeTree<T> {
    * Build resolve result.
    * Here's an example
    * Mountpoint: fs.viewfs.mounttable.mt
-   *     .linkRegex.replaceresolveddstpath:_:-#.^/user/(?<username>\w+)
+   *     .linkRegex.replaceresolveddstpath:_:-#.^/user/(?&lt;username&gt;\w+)
    * Value: /targetTestRoot/$username
    * Dir path to test:
    * viewfs://mt/user/hadoop_user1/hadoop_dir1
@@ -1030,6 +1039,10 @@ public abstract class InodeTree<T> {
    * targetOfResolvedPathStr: /targetTestRoot/hadoop-user1
    * remainingPath: /hadoop_dir1
    *
+   * @param resultKind resultKind.
+   * @param resolvedPathStr resolvedPathStr.
+   * @param targetOfResolvedPathStr targetOfResolvedPathStr.
+   * @param remainingPath remainingPath.
    * @return targetFileSystem or null on exceptions.
    */
   protected ResolveResult<T> buildResolveResultForRegexMountPoint(

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java

@@ -38,6 +38,7 @@ public interface MountTableConfigLoader {
    *          a directory in the case of multiple versions of mount-table
    *          files(Recommended option).
    * @param conf - Configuration object to add mount table.
+   * @throws IOException raised on errors performing I/O.
    */
   void load(String mountTableConfigPath, Configuration conf)
       throws IOException;

+ 10 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

@@ -107,6 +107,8 @@ public class ViewFileSystem extends FileSystem {
 
   /**
    * Gets file system creator instance.
+   *
+   * @return fs getter.
    */
   protected FsGetter fsGetter() {
     return new FsGetter();
@@ -273,7 +275,7 @@ public class ViewFileSystem extends FileSystem {
    * {@link FileSystem#createFileSystem(URI, Configuration)}
    *
    * After this constructor is called initialize() is called.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public ViewFileSystem() throws IOException {
     ugi = UserGroupInformation.getCurrentUser();
@@ -382,10 +384,10 @@ public class ViewFileSystem extends FileSystem {
   }
 
   /**
-   * Convenience Constructor for apps to call directly
+   * Convenience Constructor for apps to call directly.
    * @param theUri which must be that of ViewFileSystem
-   * @param conf
-   * @throws IOException
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
    */
   ViewFileSystem(final URI theUri, final Configuration conf)
       throws IOException {
@@ -394,9 +396,9 @@ public class ViewFileSystem extends FileSystem {
   }
 
   /**
-   * Convenience Constructor for apps to call directly
-   * @param conf
-   * @throws IOException
+   * Convenience Constructor for apps to call directly.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
    */
   public ViewFileSystem(final Configuration conf) throws IOException {
     this(FsConstants.VIEWFS_URI, conf);
@@ -1314,7 +1316,7 @@ public class ViewFileSystem extends FileSystem {
    * Constants#CONFIG_VIEWFS_LINK_MERGE_SLASH} is supported and is a valid
    * mount point. Else, throw NotInMountpointException.
    *
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public long getUsed() throws IOException {

+ 9 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java

@@ -139,6 +139,8 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
 
   /**
    * Sets whether to add fallback automatically when no mount points found.
+   *
+   * @param addAutoFallbackOnNoMounts addAutoFallbackOnNoMounts.
    */
   public void setSupportAutoAddingFallbackOnNoMounts(
       boolean addAutoFallbackOnNoMounts) {
@@ -320,7 +322,8 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
    *
    * @param path - fs uri path
    * @param conf - configuration
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
+   * @return file system.
    */
   public FileSystem getRawFileSystem(Path path, Configuration conf)
       throws IOException {
@@ -339,6 +342,11 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
   /**
    * Gets the mount path info, which contains the target file system and
    * remaining path to pass to the target file system.
+   *
+   * @param path the path.
+   * @param conf configuration.
+   * @return mount path info.
+   * @throws IOException raised on errors performing I/O.
    */
   public MountPathInfo<FileSystem> getMountPathInfo(Path path,
       Configuration conf) throws IOException {

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java

@@ -44,7 +44,7 @@ public final class ViewFileSystemUtil {
   /**
    * Check if the FileSystem is a ViewFileSystem.
    *
-   * @param fileSystem
+   * @param fileSystem file system.
    * @return true if the fileSystem is ViewFileSystem
    */
   public static boolean isViewFileSystem(final FileSystem fileSystem) {
@@ -54,7 +54,7 @@ public final class ViewFileSystemUtil {
   /**
    * Check if the FileSystem is a ViewFileSystemOverloadScheme.
    *
-   * @param fileSystem
+   * @param fileSystem file system.
    * @return true if the fileSystem is ViewFileSystemOverloadScheme
    */
   public static boolean isViewFileSystemOverloadScheme(
@@ -101,6 +101,7 @@ public final class ViewFileSystemUtil {
    * @param fileSystem - ViewFileSystem on which mount point exists
    * @param path - URI for which FsStatus is requested
    * @return Map of ViewFsMountPoint and FsStatus
+   * @throws IOException raised on errors performing I/O.
    */
   public static Map<MountPoint, FsStatus> getStatus(
       FileSystem fileSystem, Path path) throws IOException {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java

@@ -909,7 +909,7 @@ public class ViewFs extends AbstractFileSystem {
    *
    * @param src file or directory path.
    * @return storage policy for give file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public BlockStoragePolicySpi getStoragePolicy(final Path src)
       throws IOException {

+ 28 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java

@@ -91,6 +91,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
      * 
      * Callback implementations are expected to manage their own
      * timeouts (e.g. when making an RPC to a remote node).
+     *
+     * @throws ServiceFailedException Service Failed Exception.
      */
     void becomeActive() throws ServiceFailedException;
 
@@ -119,6 +121,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
      * If there is any fatal error (e.g. wrong ACL's, unexpected Zookeeper
      * errors or Zookeeper persistent unavailability) then notifyFatalError is
      * called to notify the app about it.
+     *
+     * @param errorMessage error message.
      */
     void notifyFatalError(String errorMessage);
 
@@ -204,8 +208,12 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    *                 ZK connection
    * @param app
    *          reference to callback interface object
-   * @throws IOException
+   * @param maxRetryNum maxRetryNum.
+   * @throws IOException raised on errors performing I/O.
    * @throws HadoopIllegalArgumentException
+   *         if valid data is not supplied.
+   * @throws KeeperException
+   *         other zookeeper operation errors.
    */
   public ActiveStandbyElector(String zookeeperHostPorts,
       int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -245,8 +253,13 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    *          reference to callback interface object
    * @param failFast
    *          whether need to add the retry when establishing ZK connection.
+   * @param maxRetryNum max Retry Num
    * @throws IOException
+   *          raised on errors performing I/O.
    * @throws HadoopIllegalArgumentException
+   *          if valid data is not supplied.
+   * @throws KeeperException
+   *          other zookeeper operation errors.
    */
   public ActiveStandbyElector(String zookeeperHostPorts,
       int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -312,6 +325,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   
   /**
    * @return true if the configured parent znode exists
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException interrupted exception.
    */
   public synchronized boolean parentZNodeExists()
       throws IOException, InterruptedException {
@@ -327,6 +342,10 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   /**
    * Utility function to ensure that the configured base znode exists.
    * This recursively creates the znode as well as all of its parents.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException interrupted exception.
+   * @throws KeeperException other zookeeper operation errors.
    */
   public synchronized void ensureParentZNode()
       throws IOException, InterruptedException, KeeperException {
@@ -371,6 +390,9 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * This recursively deletes everything within the znode as well as the
    * parent znode itself. It should only be used when it's certain that
    * no electors are currently participating in the election.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException interrupted exception.
    */
   public synchronized void clearParentZNode()
       throws IOException, InterruptedException {
@@ -435,6 +457,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * @throws KeeperException
    *           other zookeeper operation errors
    * @throws InterruptedException
+   *           interrupted exception.
    * @throws IOException
    *           when ZooKeeper connection could not be established
    */
@@ -684,7 +707,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * inherit and mock out the zookeeper instance
    * 
    * @return new zookeeper client instance
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @throws KeeperException zookeeper connectionloss exception
    */
   protected synchronized ZooKeeper connectToZooKeeper() throws IOException,
@@ -714,7 +737,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * inherit and pass in a mock object for zookeeper
    *
    * @return new zookeeper client instance
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected ZooKeeper createZooKeeper() throws IOException {
     return new ZooKeeper(zkHostPort, zkSessionTimeout, watcher);
@@ -781,6 +804,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * Sleep for the given number of milliseconds.
    * This is non-static, and separated out, so that unit tests
    * can override the behavior not to sleep.
+   *
+   * @param sleepMs sleep ms.
    */
   @VisibleForTesting
   protected void sleepFor(int sleepMs) {

Alguns arquivos não foram mostrados porque muitos arquivos mudaram nesse diff