
Merge -r 1368307:1368308 from trunk to branch. FIXES: HDFS-3724

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1368310 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur, 12 years ago
Commit: f5dfa69617
56 changed files with 165 additions and 4 deletions
  1. + 4 - 1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
  2. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java
  3. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java
  4. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
  5. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
  6. + 17 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
  7. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
  8. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
  9. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java
  10. + 18 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
  11. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java
  12. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
  13. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
  14. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java
  15. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
  16. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
  17. + 4 - 1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
  18. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
  19. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java
  20. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java
  21. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java
  22. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java
  23. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java
  24. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java
  25. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java
  26. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java
  27. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java
  28. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java
  29. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java
  30. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
  31. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java
  32. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java
  33. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java
  34. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java
  35. + 4 - 1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java
  36. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
  37. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
  38. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
  39. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
  40. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
  41. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
  42. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
  43. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
  44. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
  45. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java
  46. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java
  47. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
  48. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
  49. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
  50. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
  51. + 2 - 1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
  52. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
  53. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
  54. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
  55. + 3 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
  56. + 2 - 0
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java

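For context, every hunk below applies Hadoop's InterfaceAudience annotation to mark HttpFS classes and nested types as project-private API. A minimal sketch of the pattern being applied follows; the class name is hypothetical, not one from this patch:

import org.apache.hadoop.classification.InterfaceAudience;

// @InterfaceAudience.Private marks the type as internal to Hadoop: it is not
// part of the public API and may change incompatibly between releases.
@InterfaceAudience.Private
public class ExampleInternalHelper {  // hypothetical class, for illustration only
  // ...
}
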
+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.DelegationTokenRenewer;
@@ -68,6 +69,7 @@ import java.util.concurrent.Callable;
  * <p/>
  * This implementation allows a user to access HDFS over HTTP via a HttpFSServer server.
  */
+@InterfaceAudience.Private
 public class HttpFSFileSystem extends FileSystem
   implements DelegationTokenRenewer.Renewable {
 
@@ -160,7 +162,8 @@ public class HttpFSFileSystem extends FileSystem
   private static final String HTTP_POST = "POST";
   private static final String HTTP_DELETE = "DELETE";
 
-  public enum Operation {
+  @InterfaceAudience.Private
+  public static enum Operation {
     OPEN(HTTP_GET), GETFILESTATUS(HTTP_GET), LISTSTATUS(HTTP_GET),
     GETHOMEDIRECTORY(HTTP_GET), GETCONTENTSUMMARY(HTTP_GET),
     GETFILECHECKSUM(HTTP_GET),  GETFILEBLOCKLOCATIONS(HTTP_GET),

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs.http.client;
 
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
@@ -43,6 +44,7 @@ import java.util.Map;
  * A <code>KerberosAuthenticator</code> subclass that fallback to
  * {@link HttpFSPseudoAuthenticator}.
  */
+@InterfaceAudience.Private
 public class HttpFSKerberosAuthenticator extends KerberosAuthenticator {
 
   /**
@@ -71,6 +73,7 @@ public class HttpFSKerberosAuthenticator extends KerberosAuthenticator {
   /**
    * DelegationToken operations.
    */
+  @InterfaceAudience.Private
   public static enum DelegationTokenOperation {
     GETDELEGATIONTOKEN(HTTP_GET, true),
     GETDELEGATIONTOKENS(HTTP_GET, true),

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.http.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 
@@ -27,6 +28,7 @@ import java.io.IOException;
  * A <code>PseudoAuthenticator</code> subclass that uses FileSystemAccess's
  * <code>UserGroupInformation</code> to obtain the client user name (the UGI's login user).
  */
+@InterfaceAudience.Private
 public class HttpFSPseudoAuthenticator extends PseudoAuthenticator {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
@@ -35,6 +36,7 @@ import java.util.Map;
 /**
  * Utility methods used by HttpFS classes.
  */
+@InterfaceAudience.Private
 public class HttpFSUtils {
 
   public static final String SERVICE_NAME = "/webhdfs";

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.http.server;
 
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
 
 import javax.servlet.Filter;
@@ -37,6 +38,7 @@ import java.util.Set;
  * Filter that Enforces the content-type to be application/octet-stream for
  * POST and PUT requests.
  */
+@InterfaceAudience.Private
 public class CheckUploadContentTypeFilter implements Filter {
 
   private static final Set<String> UPLOAD_OPERATIONS = new HashSet<String>();

+ 17 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
@@ -40,6 +41,7 @@ import java.util.Map;
 /**
  * FileSystem operation executors used by {@link HttpFSServer}.
  */
+@InterfaceAudience.Private
 public class FSOperations {
 
   @SuppressWarnings({"unchecked", "deprecation"})
@@ -160,6 +162,7 @@ public class FSOperations {
   /**
    * Executor that performs an append FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSAppend implements FileSystemAccess.FileSystemExecutor<Void> {
     private InputStream is;
     private Path path;
@@ -198,6 +201,7 @@ public class FSOperations {
   /**
    * Executor that performs a content-summary FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSContentSummary implements FileSystemAccess.FileSystemExecutor<Map> {
     private Path path;
 
@@ -230,6 +234,7 @@ public class FSOperations {
   /**
    * Executor that performs a create FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSCreate implements FileSystemAccess.FileSystemExecutor<Void> {
     private InputStream is;
     private Path path;
@@ -288,6 +293,7 @@ public class FSOperations {
   /**
    * Executor that performs a delete FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSDelete implements FileSystemAccess.FileSystemExecutor<JSONObject> {
     private Path path;
     private boolean recursive;
@@ -324,6 +330,7 @@ public class FSOperations {
   /**
    * Executor that performs a file-checksum FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSFileChecksum implements FileSystemAccess.FileSystemExecutor<Map> {
     private Path path;
 
@@ -356,6 +363,7 @@ public class FSOperations {
   /**
    * Executor that performs a file-status FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSFileStatus implements FileSystemAccess.FileSystemExecutor<Map> {
     private Path path;
 
@@ -388,6 +396,7 @@ public class FSOperations {
   /**
    * Executor that performs a home-dir FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSHomeDir implements FileSystemAccess.FileSystemExecutor<JSONObject> {
 
     /**
@@ -413,6 +422,7 @@ public class FSOperations {
   /**
    * Executor that performs a list-status FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSListStatus implements FileSystemAccess.FileSystemExecutor<Map>, PathFilter {
     private Path path;
     private PathFilter filter;
@@ -456,6 +466,7 @@ public class FSOperations {
   /**
    * Executor that performs a mkdirs FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSMkdirs implements FileSystemAccess.FileSystemExecutor<JSONObject> {
 
     private Path path;
@@ -494,6 +505,7 @@ public class FSOperations {
   /**
    * Executor that performs a open FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSOpen implements FileSystemAccess.FileSystemExecutor<InputStream> {
     private Path path;
 
@@ -526,6 +538,7 @@ public class FSOperations {
   /**
    * Executor that performs a rename FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSRename implements FileSystemAccess.FileSystemExecutor<JSONObject> {
     private Path path;
     private Path toPath;
@@ -562,6 +575,7 @@ public class FSOperations {
   /**
    * Executor that performs a set-owner FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSSetOwner implements FileSystemAccess.FileSystemExecutor<Void> {
     private Path path;
     private String owner;
@@ -600,6 +614,7 @@ public class FSOperations {
   /**
    * Executor that performs a set-permission FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSSetPermission implements FileSystemAccess.FileSystemExecutor<Void> {
 
     private Path path;
@@ -637,6 +652,7 @@ public class FSOperations {
   /**
    * Executor that performs a set-replication FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSSetReplication implements FileSystemAccess.FileSystemExecutor<JSONObject> {
     private Path path;
     private short replication;
@@ -676,6 +692,7 @@ public class FSOperations {
   /**
    * Executor that performs a set-times FileSystemAccess files system operation.
    */
+  @InterfaceAudience.Private
   public static class FSSetTimes implements FileSystemAccess.FileSystemExecutor<Void> {
     private Path path;
     private long mTime;

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import javax.servlet.FilterConfig;
@@ -30,6 +31,7 @@ import java.util.Properties;
  * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
  * from HttpFSServer's server configuration.
  */
+@InterfaceAudience.Private
 public class HttpFSAuthenticationFilter extends AuthenticationFilter {
   private static final String CONF_PREFIX = "httpfs.authentication.";
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.http.server;
 
 import com.sun.jersey.api.container.ContainerException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.service.FileSystemAccessException;
 import org.apache.hadoop.lib.wsrs.ExceptionProvider;
 import org.slf4j.Logger;
@@ -35,6 +36,7 @@ import java.io.IOException;
  * exceptions to HTTP status codes.
  */
 @Provider
+@InterfaceAudience.Private
 public class HttpFSExceptionProvider extends ExceptionProvider {
   private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
   private static Logger LOG = LoggerFactory.getLogger(HttpFSExceptionProvider.class);

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
 import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
 import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator.DelegationTokenOperation;
@@ -52,6 +53,7 @@ import java.util.Set;
  * If not delegation token is present in the request it delegates to the
  * {@link KerberosAuthenticationHandler}
  */
+@InterfaceAudience.Private
 public class HttpFSKerberosAuthenticationHandler
   extends KerberosAuthenticationHandler {
 

+ 18 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem.Operation;
 import org.apache.hadoop.lib.wsrs.BooleanParam;
@@ -38,6 +39,7 @@ import java.util.regex.Pattern;
  * HttpFS ParametersProvider.
  */
 @Provider
+@InterfaceAudience.Private
 public class HttpFSParametersProvider extends ParametersProvider {
 
   private static final Map<Enum, Class<Param<?>>[]> PARAMS_DEF =
@@ -85,6 +87,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for access-time parameter.
    */
+  @InterfaceAudience.Private
   public static class AccessTimeParam extends LongParam {
 
     /**
@@ -102,6 +105,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for block-size parameter.
    */
+  @InterfaceAudience.Private
   public static class BlockSizeParam extends LongParam {
 
     /**
@@ -120,6 +124,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for data parameter.
    */
+  @InterfaceAudience.Private
   public static class DataParam extends BooleanParam {
 
     /**
@@ -138,6 +143,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for operation parameter.
    */
+  @InterfaceAudience.Private
   public static class OperationParam extends EnumParam<HttpFSFileSystem.Operation> {
 
     /**
@@ -156,6 +162,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for delete's recursive parameter.
    */
+  @InterfaceAudience.Private
   public static class RecursiveParam extends BooleanParam {
 
     /**
@@ -174,6 +181,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for do-as parameter.
    */
+  @InterfaceAudience.Private
   public static class DoAsParam extends StringParam {
 
     /**
@@ -208,6 +216,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for filter parameter.
    */
+  @InterfaceAudience.Private
   public static class FilterParam extends StringParam {
 
     /**
@@ -227,6 +236,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for group parameter.
    */
+  @InterfaceAudience.Private
   public static class GroupParam extends StringParam {
 
     /**
@@ -246,6 +256,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for len parameter.
    */
+  @InterfaceAudience.Private
   public static class LenParam extends LongParam {
 
     /**
@@ -264,6 +275,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for modified-time parameter.
    */
+  @InterfaceAudience.Private
   public static class ModifiedTimeParam extends LongParam {
 
     /**
@@ -282,6 +294,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for offset parameter.
    */
+  @InterfaceAudience.Private
   public static class OffsetParam extends LongParam {
 
     /**
@@ -300,6 +313,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for overwrite parameter.
    */
+  @InterfaceAudience.Private
   public static class OverwriteParam extends BooleanParam {
 
     /**
@@ -318,6 +332,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for owner parameter.
    */
+  @InterfaceAudience.Private
   public static class OwnerParam extends StringParam {
 
     /**
@@ -337,6 +352,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for permission parameter.
    */
+  @InterfaceAudience.Private
   public static class PermissionParam extends ShortParam {
 
     /**
@@ -357,6 +373,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for replication parameter.
    */
+  @InterfaceAudience.Private
   public static class ReplicationParam extends ShortParam {
 
     /**
@@ -375,6 +392,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
   /**
    * Class for to-path parameter.
    */
+  @InterfaceAudience.Private
   public static class DestinationParam extends StringParam {
 
     /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 import org.apache.hadoop.lib.servlet.FileSystemReleaseFilter;
 
@@ -25,6 +26,7 @@ import org.apache.hadoop.lib.servlet.FileSystemReleaseFilter;
  * Filter that releases FileSystemAccess filesystem instances upon HTTP request
  * completion.
  */
+@InterfaceAudience.Private
 public class HttpFSReleaseFilter extends FileSystemReleaseFilter {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
@@ -82,6 +83,7 @@ import java.util.Map;
  * different operations.
  */
 @Path(HttpFSFileSystem.SERVICE_VERSION)
+@InterfaceAudience.Private
 public class HttpFSServer {
   private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.http.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.lib.server.ServerException;
@@ -39,6 +40,7 @@ import java.io.IOException;
  * All the configuration is loaded from configuration properties prefixed
  * with <code>httpfs.</code>.
  */
+@InterfaceAudience.Private
 public class HttpFSServerWebApp extends ServerWebApp {
   private static final Logger LOG =
     LoggerFactory.getLogger(HttpFSServerWebApp.class);

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.lang;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.util.Check;
 
 import java.util.concurrent.Callable;
@@ -26,6 +27,7 @@ import java.util.concurrent.Callable;
  * Adapter class that allows <code>Runnable</code>s and <code>Callable</code>s to
  * be treated as the other.
  */
+@InterfaceAudience.Private
 public class RunnableCallable implements Callable<Void>, Runnable {
   private Runnable runnable;
   private Callable<?> callable;

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.lang;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.util.Check;
 
 import java.text.MessageFormat;
@@ -26,6 +27,7 @@ import java.text.MessageFormat;
  * Generic exception that requires error codes and uses the a message
  * template from the error code.
  */
+@InterfaceAudience.Private
 public class XException extends Exception {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
 
@@ -26,6 +27,7 @@ import java.util.Map;
 /**
  * Convenience class implementing the {@link Service} interface.
  */
+@InterfaceAudience.Private
 public abstract class BaseService implements Service {
   private String prefix;
   private Server server;

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
@@ -76,6 +77,7 @@ import java.util.Properties;
  * post-initialized (this enables late/conditional service bindings).
  * <p/>
  */
+@InterfaceAudience.Private
 public class Server {
   private Logger log;
 
@@ -97,7 +99,8 @@ public class Server {
   /**
    * Enumeration that defines the server status.
    */
-  public enum Status {
+  @InterfaceAudience.Private
+  public static enum Status {
     UNDEF(false, false),
     BOOTING(false, true),
     HALTED(true, true),

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java

@@ -18,16 +18,19 @@
 
 package org.apache.hadoop.lib.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.lang.XException;
 
 /**
  * Exception thrown by the {@link Server} class.
  */
+@InterfaceAudience.Private
 public class ServerException extends XException {
 
   /**
    * Error codes use by the {@link Server} class.
    */
+  @InterfaceAudience.Private
   public static enum ERROR implements XException.ERROR {
     S01("Dir [{0}] does not exist"),
     S02("[{0}] is not a directory"),

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java

@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.lib.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Service interface for components to be managed by the {@link Server} class.
  */
+@InterfaceAudience.Private
 public interface Service {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java

@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.lib.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.lang.XException;
 
 /**
  * Exception thrown by {@link Service} implementations.
  */
+@InterfaceAudience.Private
 public class ServiceException extends ServerException {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
@@ -24,6 +25,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
 /**
  * HttpFS <code>DelegationTokenIdentifier</code> implementation.
  */
+@InterfaceAudience.Private
 public class DelegationTokenIdentifier
   extends AbstractDelegationTokenIdentifier {
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java

@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
 /**
  * Service interface to manage HttpFS delegation tokens.
  */
+@InterfaceAudience.Private
 public interface DelegationTokenManager {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java

@@ -17,11 +17,13 @@
  */
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.lang.XException;
 
 /**
  * Exception thrown by the {@link DelegationTokenManager} service implementation.
  */
+@InterfaceAudience.Private
 public class DelegationTokenManagerException extends XException {
 
   public enum ERROR implements XException.ERROR {

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java

@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 
 import java.io.IOException;
 
+@InterfaceAudience.Private
 public interface FileSystemAccess {
 
   public interface FileSystemExecutor<T> {

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java

@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.lang.XException;
 
+@InterfaceAudience.Private
 public class FileSystemAccessException extends XException {
 
   public enum ERROR implements XException.ERROR {

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java

@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.io.IOException;
 import java.util.List;
 
+@InterfaceAudience.Private
 public interface Groups {
 
   public List<String> getGroups(String user) throws IOException;

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java

@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.util.Map;
 
+@InterfaceAudience.Private
 public interface Instrumentation {
 
   public interface Cron {

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java

@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.io.IOException;
 import java.security.AccessControlException;
 
+@InterfaceAudience.Private
 public interface ProxyUser {
 
   public void validate(String proxyUser, String proxyHost, String doAsUser) throws IOException, AccessControlException;

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java

@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.lib.service;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
+@InterfaceAudience.Private
 public interface Scheduler {
 
   public abstract void schedule(Callable<?> callable, long delay, long interval, TimeUnit unit);

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.service.hadoop;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,6 +48,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
+@InterfaceAudience.Private
 public class FileSystemAccessService extends BaseService implements FileSystemAccess {
   private static final Logger LOG = LoggerFactory.getLogger(FileSystemAccessService.class);
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.service.instrumentation;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.ServiceException;
 import org.apache.hadoop.lib.service.Instrumentation;
@@ -39,6 +40,7 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+@InterfaceAudience.Private
 public class InstrumentationService extends BaseService implements Instrumentation {
   public static final String PREFIX = "instrumentation";
   public static final String CONF_TIMERS_SIZE = "timers.size";

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.service.scheduler;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.lang.RunnableCallable;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.Server;
@@ -35,6 +36,7 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
+@InterfaceAudience.Private
 public class SchedulerService extends BaseService implements Scheduler {
   private static final Logger LOG = LoggerFactory.getLogger(SchedulerService.class);
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.lib.service.security;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.lib.server.BaseService;
@@ -37,6 +38,7 @@ import java.io.IOException;
 /**
  * DelegationTokenManager service implementation.
  */
+@InterfaceAudience.Private
 public class DelegationTokenManagerService extends BaseService
   implements DelegationTokenManager {
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.service.security;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.ServiceException;
@@ -27,6 +28,7 @@ import org.apache.hadoop.lib.util.ConfigurationUtils;
 import java.io.IOException;
 import java.util.List;
 
+@InterfaceAudience.Private
 public class GroupsService extends BaseService implements Groups {
   private static final String PREFIX = "groups";
 

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.service.security;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.lib.lang.XException;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.ServiceException;
@@ -38,10 +39,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+@InterfaceAudience.Private
 public class ProxyUserService extends BaseService implements ProxyUser {
   private static Logger LOG = LoggerFactory.getLogger(ProxyUserService.class);
 
-  public enum ERROR implements XException.ERROR {
+  @InterfaceAudience.Private
+  public static enum ERROR implements XException.ERROR {
     PRXU01("Could not normalize host name [{0}], {1}"),
     PRXU02("Missing [{0}] property");
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.servlet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 
@@ -37,6 +38,7 @@ import java.io.IOException;
  * is streaming out HDFS data and the corresponding filesystem
  * instance have to be closed after the streaming completes.
  */
+@InterfaceAudience.Private
 public abstract class FileSystemReleaseFilter implements Filter {
   private static final ThreadLocal<FileSystem> FILE_SYSTEM_TL = new ThreadLocal<FileSystem>();
 

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java

@@ -19,6 +19,8 @@
 package org.apache.hadoop.lib.servlet;
 
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -31,6 +33,7 @@ import java.net.InetAddress;
 /**
  * Filter that resolves the requester hostname.
  */
+@InterfaceAudience.Private
 public class HostnameFilter implements Filter {
   static final ThreadLocal<String> HOSTNAME_TL = new ThreadLocal<String>();
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.servlet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.slf4j.MDC;
 
 import javax.servlet.Filter;
@@ -42,6 +43,7 @@ import java.security.Principal;
  * <li>path: the path of the request URL</li>
  * </ul>
  */
+@InterfaceAudience.Private
 public class MDCFilter implements Filter {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.lib.servlet;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.server.ServerException;
@@ -34,6 +35,7 @@ import java.text.MessageFormat;
  * {@link Server} subclass that implements <code>ServletContextListener</code>
  * and uses its lifecycle to start and stop the server.
  */
+@InterfaceAudience.Private
 public abstract class ServerWebApp extends Server implements ServletContextListener {
 
   private static final String HOME_DIR = ".home.dir";

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java

@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.lib.util;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.text.MessageFormat;
 import java.util.List;
 import java.util.regex.Pattern;
@@ -27,6 +29,7 @@ import java.util.regex.Pattern;
  * <p/>
  * Commonly used for method arguments preconditions.
  */
+@InterfaceAudience.Private
 public class Check {
 
   /**

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.util;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.w3c.dom.DOMException;
 import org.w3c.dom.Document;
@@ -37,6 +38,7 @@ import java.util.Map;
 /**
  * Configuration utilities.
  */
+@InterfaceAudience.Private
 public abstract class ConfigurationUtils {
 
   /**

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java

@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.text.MessageFormat;
 
+@InterfaceAudience.Private
 public abstract class BooleanParam extends Param<Boolean> {
 
   public BooleanParam(String name, Boolean defaultValue) {

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java

@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
 public abstract class ByteParam extends Param<Byte> {
 
   public ByteParam(String name, Byte defaultValue) {

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java

@@ -18,10 +18,12 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.StringUtils;
 
 import java.util.Arrays;
 
+@InterfaceAudience.Private
 public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
   Class<E> klass;
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -28,6 +29,7 @@ import javax.ws.rs.ext.ExceptionMapper;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
+@InterfaceAudience.Private
 public class ExceptionProvider implements ExceptionMapper<Throwable> {
   private static Logger LOG = LoggerFactory.getLogger(ExceptionProvider.class);
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
 
 import javax.ws.rs.core.StreamingOutput;
@@ -25,6 +26,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
+@InterfaceAudience.Private
 public class InputStreamEntity implements StreamingOutput {
   private InputStream is;
   private long offset;

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java

@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
 public abstract class IntegerParam extends Param<Integer> {
 
   public IntegerParam(String name, Integer defaultValue) {

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.json.simple.JSONObject;
 
 import javax.ws.rs.Produces;
@@ -36,6 +37,7 @@ import java.util.Map;
 
 @Provider
 @Produces(MediaType.APPLICATION_JSON)
+@InterfaceAudience.Private
 public class JSONMapProvider implements MessageBodyWriter<Map> {
   private static final String ENTER = System.getProperty("line.separator");
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.json.simple.JSONStreamAware;
 
 import javax.ws.rs.Produces;
@@ -35,6 +36,7 @@ import java.lang.reflect.Type;
 
 @Provider
 @Produces(MediaType.APPLICATION_JSON)
+@InterfaceAudience.Private
 public class JSONProvider implements MessageBodyWriter<JSONStreamAware> {
   private static final String ENTER = System.getProperty("line.separator");
 

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java

@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
 public abstract class LongParam extends Param<Long> {
 
   public LongParam(String name, Long defaultValue) {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java

@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import org.apache.hadoop.lib.util.Check;
+import org.apache.hadoop.classification.InterfaceAudience;
 
 import java.text.MessageFormat;
 
+@InterfaceAudience.Private
 public abstract class Param<T> {
   private String name;
   protected T value;

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.util.Map;
 
 /**
@@ -24,6 +26,7 @@ import java.util.Map;
  * <p/>
  * Instances are created by the {@link ParametersProvider} class.
  */
+@InterfaceAudience.Private
 public class Parameters {
   private Map<String, Param<?>> params;
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java

@@ -24,6 +24,7 @@ import com.sun.jersey.core.spi.component.ComponentScope;
 import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
 import com.sun.jersey.spi.inject.Injectable;
 import com.sun.jersey.spi.inject.InjectableProvider;
+import org.apache.hadoop.classification.InterfaceAudience;
 
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MultivaluedMap;
@@ -36,6 +37,7 @@ import java.util.Map;
  * Jersey provider that parses the request parameters based on the
  * given parameter definition. 
  */
+@InterfaceAudience.Private
 public class ParametersProvider
   extends AbstractHttpContextInjectable<Parameters>
   implements InjectableProvider<Context, Type> {

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java

@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
 public abstract class ShortParam extends Param<Short> {
 
   private int radix;

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java

@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.lib.wsrs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.text.MessageFormat;
 import java.util.regex.Pattern;
 
+@InterfaceAudience.Private
 public abstract class StringParam extends Param<String> {
   private Pattern pattern;
 

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java

@@ -24,6 +24,7 @@ import com.sun.jersey.core.spi.component.ComponentScope;
 import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
 import com.sun.jersey.spi.inject.Injectable;
 import com.sun.jersey.spi.inject.InjectableProvider;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.slf4j.MDC;
 
 import javax.ws.rs.core.Context;
@@ -33,6 +34,7 @@ import java.security.Principal;
 import java.util.regex.Pattern;
 
 @Provider
+@InterfaceAudience.Private
 public class UserProvider extends AbstractHttpContextInjectable<Principal> implements
   InjectableProvider<Context, Type> {