HDFS-5158. Add command-line support for manipulating cache directives

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1522272 13f79535-47bb-0310-9956-ffa450edef68
Colin McCabe, 11 years ago
parent commit: 02e0e158a2
20 changed files with 974 additions and 391 deletions
  1.  +3 -0     hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
  2.  +3 -0     hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
  3.  +42 -0    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
  4.  +19 -19   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
  5.  +13 -12   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
  6.  +9 -9     hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java
  7.  +6 -6     hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java
  8.  +12 -12   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java
  9.  +83 -57   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
  10. +82 -80   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
  11. +62 -51   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
  12. +9 -1     hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
  13. +18 -18   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  14. +20 -16   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
  15. +333 -0   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
  16. +29 -32   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
  17. +137 -0   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java
  18. +32 -30   hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
  19. +60 -46   hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java
  20. +2 -2     hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt

@@ -30,6 +30,9 @@ HDFS-4949 (Unreleased)
     HDFS-5120. Add command-line support for manipulating cache pools.
     (Contributed by Colin Patrick McCabe)
 
+    HDFS-5158. Add command-line support for manipulating cache directives.
+    (Contributed by Colin Patrick McCabe)
+
 
   OPTIMIZATIONS
 

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs

@@ -59,6 +59,7 @@ function print_usage(){
   echo "						Use -help to see options"
   echo "  portmap              run a portmap service"
   echo "  nfs3                 run an NFS version 3 gateway"
+  echo "  cacheadmin           configure the HDFS cache"
   echo ""
   echo "Most commands print help when invoked w/o parameters."
 }
@@ -155,6 +156,8 @@ elif [ "$COMMAND" = "portmap" ] ; then
   CLASS=org.apache.hadoop.portmap.Portmap
 elif [ "$COMMAND" = "nfs3" ] ; then
   CLASS=org.apache.hadoop.hdfs.nfs.nfs3.Nfs3
+elif [ "$COMMAND" = "cacheadmin" ] ; then
+  CLASS=org.apache.hadoop.hdfs.tools.CacheAdmin
 else
   CLASS="$COMMAND"
 fi

+ 42 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java

@@ -67,6 +67,8 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
@@ -77,6 +79,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Fallible;
 import org.apache.hadoop.util.Progressable;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -1580,6 +1583,45 @@ public class DistributedFileSystem extends FileSystem {
     }.resolve(this, absF);
   }
 
+  /**
+   * Add some PathBasedCache directives.
+   * 
+   * @param directives A list of PathBasedCache directives to be added.
+   * @return A Fallible list, where each element is either a successfully added
+   *         PathBasedCache entry, or an IOException describing why the directive
+   *         could not be added.
+   */
+  public List<Fallible<PathBasedCacheEntry>>
+      addPathBasedCacheDirective(List<PathBasedCacheDirective> directives)
+          throws IOException {
+    return dfs.namenode.addPathBasedCacheDirectives(directives);
+  }
+  
+  /**
+   * Remove some PathBasedCache entries.
+   * 
+   * @param ids A list of all the entry IDs to be removed.
+   * @return A Fallible list where each element is either a successfully removed
+   *         ID, or an IOException describing why the ID could not be removed.
+   */
+  public List<Fallible<Long>>
+      removePathBasedCacheEntries(List<Long> ids) throws IOException {
+    return dfs.namenode.removePathBasedCacheEntries(ids);
+  }
+  
+  /**
+   * List the set of cached paths of a cache pool. Incrementally fetches results
+   * from the server.
+   * 
+   * @param pool The cache pool to list, or null to list all pools.
+   * @param path The path name to list, or null to list all paths.
+   * @return A RemoteIterator which returns PathBasedCacheEntry objects.
+   */
+  public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(
+      String pool, String path) throws IOException {
+    return dfs.namenode.listPathBasedCacheEntries(0, pool, path);
+  }
+
   /**
    * Add a cache pool.
    *

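A minimal usage sketch of the new DistributedFileSystem methods above, assuming an already-opened DistributedFileSystem handle; the class name, the path "/warm/data", and the pool "pool1" are illustrative placeholders, not part of the patch:

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.util.Fallible;

public class CacheDirectiveExample {
  static void cacheAndList(DistributedFileSystem fs) throws IOException {
    // Ask the NameNode to cache one path in pool "pool1".
    List<Fallible<PathBasedCacheEntry>> results =
        fs.addPathBasedCacheDirective(Arrays.asList(
            new PathBasedCacheDirective("/warm/data", "pool1")));
    // Each element succeeds or fails independently; get() rethrows the
    // per-directive IOException on failure.
    long id = results.get(0).get().getEntryId();

    // Incrementally list what is cached in the pool (null path = all paths).
    RemoteIterator<PathBasedCacheEntry> it =
        fs.listPathBasedCacheEntries("pool1", null);
    while (it.hasNext()) {
      System.out.println(it.next().getEntryId());
    }

    // Batched removal by entry id, again with per-element results.
    fs.removePathBasedCacheEntries(Arrays.asList(id));
  }
}
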
+ 19 - 19
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java → hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java

@@ -20,69 +20,69 @@ package org.apache.hadoop.hdfs.protocol;
 import java.io.IOException;
 
 /**
- * An exception which occurred when trying to add a path cache directive.
+ * An exception which occurred when trying to add a PathBasedCache directive.
  */
-public abstract class AddPathCacheDirectiveException extends IOException {
+public abstract class AddPathBasedCacheDirectiveException extends IOException {
   private static final long serialVersionUID = 1L;
 
-  private final PathCacheDirective directive;
+  private final PathBasedCacheDirective directive;
   
-  public AddPathCacheDirectiveException(String description,
-      PathCacheDirective directive) {
+  public AddPathBasedCacheDirectiveException(String description,
+      PathBasedCacheDirective directive) {
     super(description);
     this.directive = directive;
   }
 
-  public PathCacheDirective getDirective() {
+  public PathBasedCacheDirective getDirective() {
     return directive;
   }
 
   public static final class EmptyPathError
-      extends AddPathCacheDirectiveException {
+      extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public EmptyPathError(PathCacheDirective directive) {
+    public EmptyPathError(PathBasedCacheDirective directive) {
       super("empty path in directive " + directive, directive);
     }
   }
 
   public static class InvalidPathNameError
-      extends AddPathCacheDirectiveException {
+      extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public InvalidPathNameError(PathCacheDirective directive) {
+    public InvalidPathNameError(PathBasedCacheDirective directive) {
       super("can't handle non-absolute path name " + directive.getPath(),
           directive);
     }
   }
 
   public static class InvalidPoolNameError
-      extends AddPathCacheDirectiveException {
+      extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public InvalidPoolNameError(PathCacheDirective directive) {
+    public InvalidPoolNameError(PathBasedCacheDirective directive) {
       super("invalid pool name '" + directive.getPool() + "'", directive);
     }
   }
 
   public static class PoolWritePermissionDeniedError
-      extends AddPathCacheDirectiveException {
+      extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public PoolWritePermissionDeniedError(PathCacheDirective directive) {
+    public PoolWritePermissionDeniedError(PathBasedCacheDirective directive) {
       super("write permission denied for pool '" + directive.getPool() + "'",
             directive);
     }
   }
 
-  public static class UnexpectedAddPathCacheDirectiveException
-      extends AddPathCacheDirectiveException {
+  public static class UnexpectedAddPathBasedCacheDirectiveException
+      extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public UnexpectedAddPathCacheDirectiveException(
-        PathCacheDirective directive) {
+    public UnexpectedAddPathBasedCacheDirectiveException(
+        PathBasedCacheDirective directive) {
       super("encountered an unexpected error when trying to " +
-          "add path cache directive " + directive, directive);
+          "add PathBasedCache directive " + directive, directive);
     }
   }
 };
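
A hedged sketch of consuming these typed errors when unwrapping a Fallible result. Whether the typed error is thrown directly or arrives wrapped as a cause depends on where the Fallible was built (the server-side translator later in this patch inspects getCause()), so this checks both; the helper name is illustrative:

import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.util.Fallible;

class AddErrorHandling {
  // Illustrative helper, not part of the patch.
  static Long tryUnwrap(Fallible<PathBasedCacheEntry> result) {
    try {
      return result.get().getEntryId();
    } catch (IOException e) {
      Throwable t = (e instanceof AddPathBasedCacheDirectiveException)
          ? e : e.getCause();
      if (t instanceof AddPathBasedCacheDirectiveException) {
        AddPathBasedCacheDirectiveException ae =
            (AddPathBasedCacheDirectiveException) t;
        // Every subclass carries the directive that failed.
        System.err.println("rejected " + ae.getDirective() + ": "
            + ae.getClass().getSimpleName());
      } else {
        System.err.println("unexpected failure: " + e);
      }
      return null;
    }
  }
}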

+ 13 - 12
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java

@@ -1098,27 +1098,27 @@ public interface ClientProtocol {
       String fromSnapshot, String toSnapshot) throws IOException;
 
   /**
-   * Add some path cache directives to the CacheManager.
+   * Add some PathBasedCache directives to the CacheManager.
    * 
-   * @param directives A list of path cache directives to be added.
+   * @param directives A list of PathBasedCache directives to be added.
   * @return A Fallible list, where each element is either a successfully added
-   *         path cache entry, or an IOException describing why the directive
+   *         PathBasedCache entry, or an IOException describing why the directive
    *         could not be added.
    */
   @AtMostOnce
-  public List<Fallible<PathCacheEntry>>
-    addPathCacheDirectives(List<PathCacheDirective> directives)
+  public List<Fallible<PathBasedCacheEntry>>
+    addPathBasedCacheDirectives(List<PathBasedCacheDirective> directives)
       throws IOException;
 
   /**
-   * Remove some path cache entries from the CacheManager.
+   * Remove some PathBasedCache entries from the CacheManager.
    * 
    * @param ids A list of all the entry IDs to be removed from the CacheManager.
    * @return A Fallible list where each element is either a successfully removed
    *         ID, or an IOException describing why the ID could not be removed.
    */
   @AtMostOnce
-  public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
+  public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
       throws IOException;
 
   /**
@@ -1126,13 +1126,14 @@ public interface ClientProtocol {
    * from the server.
    * 
    * @param prevId The last listed entry ID, or -1 if this is the first call to
-   *          listPathCacheEntries.
-   * @param pool The cache pool to list, or the empty string to list all pools
-   * @return A RemoteIterator which returns PathCacheEntry objects.
+   *          listPathBasedCacheEntries.
+   * @param pool The cache pool to list, or null to list all pools.
+   * @param path The path name to list, or null to list all paths.
+   * @return A RemoteIterator which returns PathBasedCacheEntry objects.
    */
   @Idempotent
-  public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
-      String pool) throws IOException;
+  public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
+      String pool, String path) throws IOException;
   
   /**
    * Add a new cache pool.

+ 9 - 9
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java → hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java

@@ -24,19 +24,19 @@ import com.google.common.collect.ComparisonChain;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
 
 /**
  * A directive to add a path to a cache pool.
  */
-public class PathCacheDirective implements Comparable<PathCacheDirective> {
+public class PathBasedCacheDirective implements Comparable<PathBasedCacheDirective> {
   private final String path;
 
   private final String pool;
 
-  public PathCacheDirective(String path, String pool) {
+  public PathBasedCacheDirective(String path, String pool) {
     Preconditions.checkNotNull(path);
     Preconditions.checkNotNull(pool);
     this.path = path;
@@ -58,10 +58,10 @@ public class PathCacheDirective implements Comparable<PathCacheDirective> {
   }
 
   /**
-   * Check if this PathCacheDirective is valid.
+   * Check if this PathBasedCacheDirective is valid.
    * 
    * @throws IOException
-   *     If this PathCacheDirective is not valid.
+   *     If this PathBasedCacheDirective is not valid.
    */
   public void validate() throws IOException {
     if (path.isEmpty()) {
@@ -76,7 +76,7 @@ public class PathCacheDirective implements Comparable<PathCacheDirective> {
   }
 
   @Override
-  public int compareTo(PathCacheDirective rhs) {
+  public int compareTo(PathBasedCacheDirective rhs) {
     return ComparisonChain.start().
         compare(pool, rhs.getPool()).
         compare(path, rhs.getPath()).
@@ -91,7 +91,7 @@ public class PathCacheDirective implements Comparable<PathCacheDirective> {
   @Override
   public boolean equals(Object o) {
     try {
-      PathCacheDirective other = (PathCacheDirective)o;
+      PathBasedCacheDirective other = (PathBasedCacheDirective)o;
       return other.compareTo(this) == 0;
     } catch (ClassCastException e) {
       return false;

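A small sketch of the directive semantics shown above: ordering chains pool first, then path, and validate() rejects bad paths with the typed errors from AddPathBasedCacheDirectiveException. That a relative path fails with InvalidPathNameError is an assumption based on that error's "non-absolute path name" message, since the validate() body is partly outside this hunk:

import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;

class DirectiveSemantics {
  public static void main(String[] args) throws Exception {
    PathBasedCacheDirective a = new PathBasedCacheDirective("/a", "pool1");
    PathBasedCacheDirective b = new PathBasedCacheDirective("/b", "pool1");
    // compareTo() compares pool, then path, so equal pools order by path.
    assert a.compareTo(b) < 0;
    // An empty path fails validation with EmptyPathError; a relative
    // path is expected to fail with InvalidPathNameError.
    new PathBasedCacheDirective("relative/path", "pool1").validate();
  }
}
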
+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheEntry.java → hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java

@@ -23,13 +23,13 @@ import org.apache.commons.lang.builder.HashCodeBuilder;
 import com.google.common.base.Preconditions;
 
 /**
- * An entry in the NameNode's path cache.
+ * An entry in the NameNode's PathBasedCache.
  */
-public final class PathCacheEntry {
+public final class PathBasedCacheEntry {
   private final long entryId;
-  private final PathCacheDirective directive;
+  private final PathBasedCacheDirective directive;
 
-  public PathCacheEntry(long entryId, PathCacheDirective directive) {
+  public PathBasedCacheEntry(long entryId, PathBasedCacheDirective directive) {
     Preconditions.checkArgument(entryId > 0);
     this.entryId = entryId;
     this.directive = directive;
@@ -39,14 +39,14 @@ public final class PathCacheEntry {
     return entryId;
   }
 
-  public PathCacheDirective getDirective() {
+  public PathBasedCacheDirective getDirective() {
     return directive;
   }
 
   @Override
   public boolean equals(Object o) {
     try {
-      PathCacheEntry other = (PathCacheEntry)o;
+      PathBasedCacheEntry other = (PathBasedCacheEntry)o;
       return new EqualsBuilder().
           append(this.entryId, other.entryId).
           append(this.directive, other.directive).

+ 12 - 12
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathCacheEntryException.java → hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java

@@ -22,14 +22,14 @@ import java.io.IOException;
 import com.google.common.base.Preconditions;
 
 /**
- * An exception which occurred when trying to remove a path cache entry.
+ * An exception which occurred when trying to remove a PathBasedCache entry.
  */
-public abstract class RemovePathCacheEntryException extends IOException {
+public abstract class RemovePathBasedCacheEntryException extends IOException {
   private static final long serialVersionUID = 1L;
 
   private final long entryId;
 
-  public RemovePathCacheEntryException(String description, long entryId) {
+  public RemovePathBasedCacheEntryException(String description, long entryId) {
     super(description);
     this.entryId = entryId;
   }
@@ -39,7 +39,7 @@ public abstract class RemovePathCacheEntryException extends IOException {
   }
 
   public final static class InvalidIdException
-      extends RemovePathCacheEntryException {
+      extends RemovePathBasedCacheEntryException {
     private static final long serialVersionUID = 1L;
 
     public InvalidIdException(long entryId) {
@@ -48,31 +48,31 @@ public abstract class RemovePathCacheEntryException extends IOException {
   }
 
   public final static class RemovePermissionDeniedException
-      extends RemovePathCacheEntryException {
+      extends RemovePathBasedCacheEntryException {
     private static final long serialVersionUID = 1L;
 
     public RemovePermissionDeniedException(long entryId) {
-      super("permission denied when trying to remove path cache entry id " +
+      super("permission denied when trying to remove PathBasedCache entry id " +
         entryId, entryId);
     }
   }
 
   public final static class NoSuchIdException
-      extends RemovePathCacheEntryException {
+      extends RemovePathBasedCacheEntryException {
     private static final long serialVersionUID = 1L;
 
     public NoSuchIdException(long entryId) {
-      super("there is no path cache entry with id " + entryId, entryId);
+      super("there is no PathBasedCache entry with id " + entryId, entryId);
     }
   }
 
-  public final static class UnexpectedRemovePathCacheEntryException
-      extends RemovePathCacheEntryException {
+  public final static class UnexpectedRemovePathBasedCacheEntryException
+      extends RemovePathBasedCacheEntryException {
     private static final long serialVersionUID = 1L;
 
-    public UnexpectedRemovePathCacheEntryException(long id) {
+    public UnexpectedRemovePathBasedCacheEntryException(long id) {
       super("encountered an unexpected error when trying to " +
-          "remove path cache entry id " + id, id);
+          "remove PathBasedCache entry id " + id, id);
     }
   }
 }

+ 83 - 57
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java

@@ -28,9 +28,10 @@ import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
@@ -38,11 +39,11 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
@@ -51,9 +52,9 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlo
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectiveErrorProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
@@ -114,25 +115,25 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCa
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesElementProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesElementProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntryErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntryErrorProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2ResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -174,7 +175,6 @@ import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
-import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
 import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
@@ -1039,34 +1039,39 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
   }
 
   @Override
-  public AddPathCacheDirectivesResponseProto addPathCacheDirectives(RpcController controller,
-      AddPathCacheDirectivesRequestProto request) throws ServiceException {
+  public AddPathBasedCacheDirectivesResponseProto addPathBasedCacheDirectives(RpcController controller,
+      AddPathBasedCacheDirectivesRequestProto request) throws ServiceException {
     try {
-      ArrayList<PathCacheDirective> input =
-          new ArrayList<PathCacheDirective>(request.getElementsCount());
+      ArrayList<PathBasedCacheDirective> input =
+          new ArrayList<PathBasedCacheDirective>(request.getElementsCount());
       for (int i = 0; i < request.getElementsCount(); i++) {
-        PathCacheDirectiveProto proto = request.getElements(i);
-        input.add(new PathCacheDirective(proto.getPath(), proto.getPool()));
+        PathBasedCacheDirectiveProto proto = request.getElements(i);
+        input.add(new PathBasedCacheDirective(proto.getPath(), proto.getPool()));
       }
-      List<Fallible<PathCacheEntry>> output = server.addPathCacheDirectives(input);
-      AddPathCacheDirectivesResponseProto.Builder builder =
-         AddPathCacheDirectivesResponseProto.newBuilder();
+      List<Fallible<PathBasedCacheEntry>> output = server.addPathBasedCacheDirectives(input);
+      AddPathBasedCacheDirectivesResponseProto.Builder builder =
+         AddPathBasedCacheDirectivesResponseProto.newBuilder();
       for (int idx = 0; idx < output.size(); idx++) {
         try {
-          PathCacheEntry entry = output.get(idx).get();
+          PathBasedCacheEntry entry = output.get(idx).get();
           builder.addResults(entry.getEntryId());
-        } catch (EmptyPathError ioe) {
-          builder.addResults(AddPathCacheDirectiveErrorProto.
-              EMPTY_PATH_ERROR_VALUE);
-        } catch (InvalidPathNameError ioe) {
-          builder.addResults(AddPathCacheDirectiveErrorProto.
-              INVALID_PATH_NAME_ERROR_VALUE);
-        } catch (InvalidPoolNameError ioe) {
-          builder.addResults(AddPathCacheDirectiveErrorProto.
-              INVALID_POOL_NAME_ERROR_VALUE);
         } catch (IOException ioe) {
-          builder.addResults(AddPathCacheDirectiveErrorProto.
-              UNEXPECTED_ADD_ERROR_VALUE);
+          if (ioe.getCause() instanceof EmptyPathError) {
+            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+                EMPTY_PATH_ERROR_VALUE);
+          } else if (ioe.getCause() instanceof InvalidPathNameError) {
+            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+                INVALID_PATH_NAME_ERROR_VALUE);
+          } else if (ioe.getCause() instanceof InvalidPoolNameError) {
+            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+                INVALID_POOL_NAME_ERROR_VALUE);
+          } else if (ioe.getCause() instanceof PoolWritePermissionDeniedError) {
+            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+                ADD_PERMISSION_DENIED_ERROR_VALUE);
+          } else {
+            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+                UNEXPECTED_ADD_ERROR_VALUE);
+          }
         }
       }
       return builder.build();
@@ -1076,29 +1081,29 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
   }
 
   @Override
-  public RemovePathCacheEntriesResponseProto removePathCacheEntries(
-      RpcController controller, RemovePathCacheEntriesRequestProto request)
+  public RemovePathBasedCacheEntriesResponseProto removePathBasedCacheEntries(
+      RpcController controller, RemovePathBasedCacheEntriesRequestProto request)
       throws ServiceException {
     try {
       List<Fallible<Long>> output =
-         server.removePathCacheEntries(request.getElementsList());
-      RemovePathCacheEntriesResponseProto.Builder builder =
-         RemovePathCacheEntriesResponseProto.newBuilder();
+         server.removePathBasedCacheEntries(request.getElementsList());
+      RemovePathBasedCacheEntriesResponseProto.Builder builder =
+         RemovePathBasedCacheEntriesResponseProto.newBuilder();
       for (int idx = 0; idx < output.size(); idx++) {
         try {
           long id = output.get(idx).get();
           builder.addResults(id);
         } catch (InvalidIdException ioe) {
-          builder.addResults(RemovePathCacheEntryErrorProto.
+          builder.addResults(RemovePathBasedCacheEntryErrorProto.
               INVALID_CACHED_PATH_ID_ERROR_VALUE);
         } catch (NoSuchIdException ioe) {
-          builder.addResults(RemovePathCacheEntryErrorProto.
+          builder.addResults(RemovePathBasedCacheEntryErrorProto.
               NO_SUCH_CACHED_PATH_ID_ERROR_VALUE);
         } catch (RemovePermissionDeniedException ioe) {
-          builder.addResults(RemovePathCacheEntryErrorProto.
+          builder.addResults(RemovePathBasedCacheEntryErrorProto.
               REMOVE_PERMISSION_DENIED_ERROR_VALUE);
         } catch (IOException ioe) {
-          builder.addResults(RemovePathCacheEntryErrorProto.
+          builder.addResults(RemovePathBasedCacheEntryErrorProto.
               UNEXPECTED_REMOVE_ERROR_VALUE);
         }
       }
@@ -1109,20 +1114,32 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
   }
 
   @Override
-  public ListPathCacheEntriesResponseProto listPathCacheEntries(RpcController controller,
-      ListPathCacheEntriesRequestProto request) throws ServiceException {
+  public ListPathBasedCacheEntriesResponseProto listPathBasedCacheEntries(
+      RpcController controller, ListPathBasedCacheEntriesRequestProto request)
+          throws ServiceException {
     try {
-      RemoteIterator<PathCacheEntry> iter =
-         server.listPathCacheEntries(request.getPrevId(), request.getPool());
-      ListPathCacheEntriesResponseProto.Builder builder =
-          ListPathCacheEntriesResponseProto.newBuilder();
+      RemoteIterator<PathBasedCacheEntry> iter =
+         server.listPathBasedCacheEntries(request.getPrevId(),
+             request.hasPool() ? request.getPool() : null,
+             request.hasPath() ? request.getPath() : null);
+      ListPathBasedCacheEntriesResponseProto.Builder builder =
+          ListPathBasedCacheEntriesResponseProto.newBuilder();
+      long prevId = 0;
       while (iter.hasNext()) {
-        PathCacheEntry entry = iter.next();
+        PathBasedCacheEntry entry = iter.next();
         builder.addElements(
-            ListPathCacheEntriesElementProto.newBuilder().
+            ListPathBasedCacheEntriesElementProto.newBuilder().
               setId(entry.getEntryId()).
               setPath(entry.getDirective().getPath()).
               setPool(entry.getDirective().getPool()));
+        prevId = entry.getEntryId();
+      }
+      if (prevId == 0) {
+        builder.setHasMore(false);
+      } else {
+        iter = server.listPathBasedCacheEntries(prevId, request.getPool(),
+            request.getPath());
+        builder.setHasMore(iter.hasNext());
       }
       return builder.build();
     } catch (IOException e) {
@@ -1199,6 +1216,7 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
         server.listCachePools(request.getPrevPoolName());
       ListCachePoolsResponseProto.Builder responseBuilder =
         ListCachePoolsResponseProto.newBuilder();
+      String prevPoolName = null;
       while (iter.hasNext()) {
         CachePoolInfo pool = iter.next();
         ListCachePoolsResponseElementProto.Builder elemBuilder = 
@@ -1217,6 +1235,14 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
           elemBuilder.setWeight(pool.getWeight());
         }
         responseBuilder.addElements(elemBuilder.build());
+        prevPoolName = pool.getPoolName();
+      }
+      // fill in hasNext
+      if (prevPoolName == null) {
+        responseBuilder.setHasMore(false);
+      } else {
+        iter = server.listCachePools(prevPoolName);
+        responseBuilder.setHasMore(iter.hasNext());
       }
       return responseBuilder.build();
     } catch (IOException e) {

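Both listing responses above (PathBasedCache entries and cache pools) now fill hasMore with the same idiom: drain one batch while remembering the last emitted key, then issue one extra listing from that key and report whether it yields anything. A generic sketch of the pattern; ListFn, KeyOf, and drainAndProbe are illustrative names, not from the patch:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.fs.RemoteIterator;

class HasMoreIdiom {
  interface ListFn<K, E> {
    RemoteIterator<E> list(K fromKey) throws IOException;
  }
  interface KeyOf<K, E> {
    K key(E element);
  }

  // Drain one batch into out, then probe once more from the last emitted
  // key to decide whether hasMore should be set on the response.
  static <K, E> boolean drainAndProbe(ListFn<K, E> fn, RemoteIterator<E> batch,
      KeyOf<K, E> keyOf, List<E> out) throws IOException {
    K last = null;
    while (batch.hasNext()) {
      E e = batch.next();
      out.add(e);
      last = keyOf.key(e);
    }
    // An empty batch means the listing is already exhausted.
    return last != null && fn.list(last).hasNext();
  }
}
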
+ 82 - 80
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java

@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.NoSuchElementException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -39,16 +38,16 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.UnexpectedRemovePathBasedCacheEntryException;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -68,10 +67,10 @@ import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectiveErrorProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendResponseProto;
@@ -109,10 +108,10 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSna
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesElementProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesElementProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseElementProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
@@ -122,9 +121,9 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Mkdirs
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntryErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntryErrorProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -1018,47 +1017,47 @@ public class ClientNamenodeProtocolTranslatorPB implements
     }
   }
 
-  private static IOException addPathCacheDirectivesError(long code,
-      PathCacheDirective directive) {
-    if (code == AddPathCacheDirectiveErrorProto.EMPTY_PATH_ERROR_VALUE) {
+  private static IOException addPathBasedCacheDirectivesError(long code,
+      PathBasedCacheDirective directive) {
+    if (code == AddPathBasedCacheDirectiveErrorProto.EMPTY_PATH_ERROR_VALUE) {
       return new EmptyPathError(directive);
-    } else if (code == AddPathCacheDirectiveErrorProto.
+    } else if (code == AddPathBasedCacheDirectiveErrorProto.
         INVALID_PATH_NAME_ERROR_VALUE) {
       return new InvalidPathNameError(directive);
-    } else if (code == AddPathCacheDirectiveErrorProto.
+    } else if (code == AddPathBasedCacheDirectiveErrorProto.
         INVALID_POOL_NAME_ERROR_VALUE) {
       return new InvalidPoolNameError(directive);
     } else {
-      return new UnexpectedAddPathCacheDirectiveException(directive);
+      return new UnexpectedAddPathBasedCacheDirectiveException(directive);
     }
   }
   
   @Override
-  public List<Fallible<PathCacheEntry>> addPathCacheDirectives(
-        List<PathCacheDirective> directives) throws IOException {
+  public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
+        List<PathBasedCacheDirective> directives) throws IOException {
     try {
-      AddPathCacheDirectivesRequestProto.Builder builder =
-          AddPathCacheDirectivesRequestProto.newBuilder();
-      for (PathCacheDirective directive : directives) {
-        builder.addElements(PathCacheDirectiveProto.newBuilder().
+      AddPathBasedCacheDirectivesRequestProto.Builder builder =
+          AddPathBasedCacheDirectivesRequestProto.newBuilder();
+      for (PathBasedCacheDirective directive : directives) {
+        builder.addElements(PathBasedCacheDirectiveProto.newBuilder().
             setPath(directive.getPath()).
             setPool(directive.getPool()).
             build());
       }
-      AddPathCacheDirectivesResponseProto result = 
-          rpcProxy.addPathCacheDirectives(null, builder.build());
+      AddPathBasedCacheDirectivesResponseProto result = 
+          rpcProxy.addPathBasedCacheDirectives(null, builder.build());
       int resultsCount = result.getResultsCount();
-      ArrayList<Fallible<PathCacheEntry>> results = 
-          new ArrayList<Fallible<PathCacheEntry>>(resultsCount);
+      ArrayList<Fallible<PathBasedCacheEntry>> results = 
+          new ArrayList<Fallible<PathBasedCacheEntry>>(resultsCount);
       for (int i = 0; i < resultsCount; i++) {
-        PathCacheDirective directive = directives.get(i);
+        PathBasedCacheDirective directive = directives.get(i);
         long code = result.getResults(i);
         if (code > 0) {
-          results.add(new Fallible<PathCacheEntry>(
-                new PathCacheEntry(code, directive)));
+          results.add(new Fallible<PathBasedCacheEntry>(
+                new PathBasedCacheEntry(code, directive)));
         } else {
-          results.add(new Fallible<PathCacheEntry>(
-                addPathCacheDirectivesError(code, directive))); 
+          results.add(new Fallible<PathBasedCacheEntry>(
+                addPathBasedCacheDirectivesError(code, directive))); 
         }
       }
       return results;
@@ -1067,32 +1066,32 @@ public class ClientNamenodeProtocolTranslatorPB implements
     }
   }
   
-  private static IOException removePathCacheEntriesError(long code, long id) {
-    if (code == RemovePathCacheEntryErrorProto.
+  private static IOException removePathBasedCacheEntriesError(long code, long id) {
+    if (code == RemovePathBasedCacheEntryErrorProto.
         INVALID_CACHED_PATH_ID_ERROR_VALUE) {
       return new InvalidIdException(id);
-    } else if (code == RemovePathCacheEntryErrorProto.
+    } else if (code == RemovePathBasedCacheEntryErrorProto.
         NO_SUCH_CACHED_PATH_ID_ERROR_VALUE) {
       return new NoSuchIdException(id);
-    } else if (code == RemovePathCacheEntryErrorProto.
+    } else if (code == RemovePathBasedCacheEntryErrorProto.
         REMOVE_PERMISSION_DENIED_ERROR_VALUE) {
       return new RemovePermissionDeniedException(id);
     } else {
-      return new UnexpectedRemovePathCacheEntryException(id);
+      return new UnexpectedRemovePathBasedCacheEntryException(id);
     }
   }
 
   @Override
-  public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
+  public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
       throws IOException {
     try {
-      RemovePathCacheEntriesRequestProto.Builder builder =
-          RemovePathCacheEntriesRequestProto.newBuilder();
+      RemovePathBasedCacheEntriesRequestProto.Builder builder =
+          RemovePathBasedCacheEntriesRequestProto.newBuilder();
       for (Long id : ids) {
         builder.addElements(id);
       }
-      RemovePathCacheEntriesResponseProto result = 
-          rpcProxy.removePathCacheEntries(null, builder.build());
+      RemovePathBasedCacheEntriesResponseProto result = 
+          rpcProxy.removePathBasedCacheEntries(null, builder.build());
       int resultsCount = result.getResultsCount();
       ArrayList<Fallible<Long>> results = 
           new ArrayList<Fallible<Long>>(resultsCount);
@@ -1102,7 +1101,7 @@ public class ClientNamenodeProtocolTranslatorPB implements
           results.add(new Fallible<Long>(code));
         } else {
           results.add(new Fallible<Long>(
-              removePathCacheEntriesError(code, ids.get(i))));
+              removePathBasedCacheEntriesError(code, ids.get(i))));
         }
       }
       return results;
@@ -1111,20 +1110,20 @@ public class ClientNamenodeProtocolTranslatorPB implements
     }
   }
 
-  private static class BatchedPathCacheEntries
-      implements BatchedEntries<PathCacheEntry> {
-    private ListPathCacheEntriesResponseProto response;
+  private static class BatchedPathBasedCacheEntries
+      implements BatchedEntries<PathBasedCacheEntry> {
+    private ListPathBasedCacheEntriesResponseProto response;
 
-    BatchedPathCacheEntries(ListPathCacheEntriesResponseProto response) {
+    BatchedPathBasedCacheEntries(ListPathBasedCacheEntriesResponseProto response) {
       this.response = response;
     }
 
     @Override
-    public PathCacheEntry get(int i) {
-      ListPathCacheEntriesElementProto elementProto =
+    public PathBasedCacheEntry get(int i) {
+      ListPathBasedCacheEntriesElementProto elementProto =
         response.getElements(i);
-      return new PathCacheEntry(elementProto.getId(), 
-          new PathCacheDirective(elementProto.getPath(),
+      return new PathBasedCacheEntry(elementProto.getId(), 
+          new PathBasedCacheDirective(elementProto.getPath(),
               elementProto.getPool()));
     }
 
@@ -1139,45 +1138,48 @@ public class ClientNamenodeProtocolTranslatorPB implements
     }
   }
 
-  private class PathCacheEntriesIterator
-      extends BatchedRemoteIterator<Long, PathCacheEntry> {
+  private class PathBasedCacheEntriesIterator
+      extends BatchedRemoteIterator<Long, PathBasedCacheEntry> {
     private final String pool;
+    private final String path;
 
-    public PathCacheEntriesIterator(long prevKey, String pool) {
+    public PathBasedCacheEntriesIterator(long prevKey, String pool, String path) {
       super(prevKey);
       this.pool = pool;
+      this.path = path;
     }
 
     @Override
-    public BatchedEntries<PathCacheEntry> makeRequest(
+    public BatchedEntries<PathBasedCacheEntry> makeRequest(
         Long nextKey) throws IOException {
-      ListPathCacheEntriesResponseProto response;
+      ListPathBasedCacheEntriesResponseProto response;
       try {
-        ListPathCacheEntriesRequestProto req =
-            ListPathCacheEntriesRequestProto.newBuilder().
-              setPrevId(nextKey).
-              setPool(pool).
-              build();
-        response = rpcProxy.listPathCacheEntries(null, req);
-        if (response.getElementsCount() == 0) {
-          response = null;
+        ListPathBasedCacheEntriesRequestProto.Builder builder =
+            ListPathBasedCacheEntriesRequestProto.newBuilder().setPrevId(nextKey);
+        if (pool != null) {
+          builder.setPool(pool);
         }
+        if (path != null) {
+          builder.setPath(path);
+        }
+        ListPathBasedCacheEntriesRequestProto req = builder.build();
+        response = rpcProxy.listPathBasedCacheEntries(null, req);
       } catch (ServiceException e) {
         throw ProtobufHelper.getRemoteException(e);
       }
-      return new BatchedPathCacheEntries(response);
+      return new BatchedPathBasedCacheEntries(response);
     }
 
     @Override
-    public Long elementToPrevKey(PathCacheEntry element) {
+    public Long elementToPrevKey(PathBasedCacheEntry element) {
       return element.getEntryId();
     }
   }
 
   @Override
-  public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
-      String pool) throws IOException {
-    return new PathCacheEntriesIterator(prevId, pool);
+  public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
+      String pool, String path) throws IOException {
+    return new PathBasedCacheEntriesIterator(prevId, pool, path);
   }
 
   @Override

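The iterator above follows the BatchedRemoteIterator contract: the subclass supplies only makeRequest(prevKey), which fetches one batch starting after prevKey, and elementToPrevKey(element), which extracts the cursor to resume from; the base class stitches batches into a single RemoteIterator. A toy re-creation of that control flow, with stand-in types rather than the Hadoop classes (the real base class also deals with IOExceptions and server-reported hasMore):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

abstract class ToyBatchedIterator<K, E> implements Iterator<E> {
  private K prevKey;
  private List<E> batch = new ArrayList<E>();
  private int pos = 0;
  private boolean done = false;

  ToyBatchedIterator(K prevKey) { this.prevKey = prevKey; }

  abstract List<E> makeRequest(K prevKey); // fetch one batch after prevKey
  abstract K elementToPrevKey(E element);  // cursor to resume from

  @Override
  public boolean hasNext() {
    if (pos < batch.size()) return true;
    if (done) return false;
    batch = makeRequest(prevKey);          // lazily pull the next batch
    pos = 0;
    done = batch.isEmpty();
    return !done;
  }

  @Override
  public E next() {
    if (!hasNext()) throw new NoSuchElementException();
    E e = batch.get(pos++);
    prevKey = elementToPrevKey(e);         // advance the resume cursor
    return e;
  }

  @Override
  public void remove() { throw new UnsupportedOperationException(); }
}
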
+ 62 - 51
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java

@@ -35,16 +35,17 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
 import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.PoolWritePermissionDeniedError;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.UnexpectedRemovePathBasedCacheEntryException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
 import org.apache.hadoop.util.Fallible;
 
 /**
@@ -56,17 +57,17 @@ final class CacheManager {
   /**
    * Cache entries, sorted by ID.
    *
-   * listPathCacheEntries relies on the ordering of elements in this map 
+   * listPathBasedCacheEntries relies on the ordering of elements in this map 
    * to track what has already been listed by the client.
    */
-  private final TreeMap<Long, PathCacheEntry> entriesById =
-      new TreeMap<Long, PathCacheEntry>();
+  private final TreeMap<Long, PathBasedCacheEntry> entriesById =
+      new TreeMap<Long, PathBasedCacheEntry>();
 
   /**
    * Cache entries, sorted by directive.
    */
-  private final TreeMap<PathCacheDirective, PathCacheEntry> entriesByDirective =
-      new TreeMap<PathCacheDirective, PathCacheEntry>();
+  private final TreeMap<PathBasedCacheDirective, PathBasedCacheEntry> entriesByDirective =
+      new TreeMap<PathBasedCacheDirective, PathBasedCacheEntry>();
 
   /**
    * Cache pools, sorted by name.
@@ -114,53 +115,53 @@ final class CacheManager {
     return nextEntryId++;
   }
 
-  private synchronized Fallible<PathCacheEntry> addDirective(
-        PathCacheDirective directive, FSPermissionChecker pc) {
+  private synchronized Fallible<PathBasedCacheEntry> addDirective(
+        PathBasedCacheDirective directive, FSPermissionChecker pc) {
     CachePool pool = cachePools.get(directive.getPool());
     if (pool == null) {
       LOG.info("addDirective " + directive + ": pool not found.");
-      return new Fallible<PathCacheEntry>(
+      return new Fallible<PathBasedCacheEntry>(
           new InvalidPoolNameError(directive));
     }
     if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
       LOG.info("addDirective " + directive + ": write permission denied.");
-      return new Fallible<PathCacheEntry>(
+      return new Fallible<PathBasedCacheEntry>(
           new PoolWritePermissionDeniedError(directive));
     }
     try {
       directive.validate();
     } catch (IOException ioe) {
       LOG.info("addDirective " + directive + ": validation failed.");
-      return new Fallible<PathCacheEntry>(ioe);
+      return new Fallible<PathBasedCacheEntry>(ioe);
     }
     // Check if we already have this entry.
-    PathCacheEntry existing = entriesByDirective.get(directive);
+    PathBasedCacheEntry existing = entriesByDirective.get(directive);
     if (existing != null) {
       // Entry already exists: return existing entry.
       LOG.info("addDirective " + directive + ": there is an " +
           "existing directive " + existing);
-      return new Fallible<PathCacheEntry>(existing);
+      return new Fallible<PathBasedCacheEntry>(existing);
     }
     // Add a new entry with the next available ID.
-    PathCacheEntry entry;
+    PathBasedCacheEntry entry;
     try {
-      entry = new PathCacheEntry(getNextEntryId(), directive);
+      entry = new PathBasedCacheEntry(getNextEntryId(), directive);
     } catch (IOException ioe) {
-      return new Fallible<PathCacheEntry>(
-          new UnexpectedAddPathCacheDirectiveException(directive));
+      return new Fallible<PathBasedCacheEntry>(
+          new UnexpectedAddPathBasedCacheDirectiveException(directive));
     }
     LOG.info("addDirective " + directive + ": added cache directive "
         + directive);
     entriesByDirective.put(directive, entry);
     entriesById.put(entry.getEntryId(), entry);
-    return new Fallible<PathCacheEntry>(entry);
+    return new Fallible<PathBasedCacheEntry>(entry);
   }
 
-  public synchronized List<Fallible<PathCacheEntry>> addDirectives(
-      List<PathCacheDirective> directives, FSPermissionChecker pc) {
-    ArrayList<Fallible<PathCacheEntry>> results = 
-        new ArrayList<Fallible<PathCacheEntry>>(directives.size());
-    for (PathCacheDirective directive: directives) {
+  public synchronized List<Fallible<PathBasedCacheEntry>> addDirectives(
+      List<PathBasedCacheDirective> directives, FSPermissionChecker pc) {
+    ArrayList<Fallible<PathBasedCacheEntry>> results = 
+        new ArrayList<Fallible<PathBasedCacheEntry>>(directives.size());
+    for (PathBasedCacheDirective directive: directives) {
       results.add(addDirective(directive, pc));
     }
     return results;
@@ -174,7 +175,7 @@ final class CacheManager {
       return new Fallible<Long>(new InvalidIdException(entryId));
     }
     // Find the entry.
-    PathCacheEntry existing = entriesById.get(entryId);
+    PathBasedCacheEntry existing = entriesById.get(entryId);
     if (existing == null) {
       LOG.info("removeEntry " + entryId + ": entry not found.");
       return new Fallible<Long>(new NoSuchIdException(entryId));
@@ -184,7 +185,7 @@ final class CacheManager {
       LOG.info("removeEntry " + entryId + ": pool not found for directive " +
         existing.getDirective());
       return new Fallible<Long>(
-          new UnexpectedRemovePathCacheEntryException(entryId));
+          new UnexpectedRemovePathBasedCacheEntryException(entryId));
     }
     if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
       LOG.info("removeEntry " + entryId + ": write permission denied to " +
@@ -198,7 +199,7 @@ final class CacheManager {
       LOG.warn("removeEntry " + entryId + ": failed to find existing entry " +
           existing + " in entriesByDirective");
       return new Fallible<Long>(
-          new UnexpectedRemovePathCacheEntryException(entryId));
+          new UnexpectedRemovePathBasedCacheEntryException(entryId));
     }
     entriesById.remove(entryId);
     return new Fallible<Long>(entryId);
@@ -214,33 +215,44 @@ final class CacheManager {
     return results;
   }
 
-  public synchronized BatchedListEntries<PathCacheEntry> 
-        listPathCacheEntries(long prevId, String filterPool, FSPermissionChecker pc) {
+  public synchronized BatchedListEntries<PathBasedCacheEntry> 
+        listPathBasedCacheEntries(long prevId, String filterPool,
+            String filterPath, FSPermissionChecker pc) throws IOException {
     final int NUM_PRE_ALLOCATED_ENTRIES = 16;
-    ArrayList<PathCacheEntry> replies =
-        new ArrayList<PathCacheEntry>(NUM_PRE_ALLOCATED_ENTRIES);
+    if (filterPath != null) {
+      if (!DFSUtil.isValidName(filterPath)) {
+        throw new IOException("invalid path name '" + filterPath + "'");
+      }
+    }
+    ArrayList<PathBasedCacheEntry> replies =
+        new ArrayList<PathBasedCacheEntry>(NUM_PRE_ALLOCATED_ENTRIES);
     int numReplies = 0;
-    SortedMap<Long, PathCacheEntry> tailMap = entriesById.tailMap(prevId + 1);
-    for (Entry<Long, PathCacheEntry> cur : tailMap.entrySet()) {
+    SortedMap<Long, PathBasedCacheEntry> tailMap = entriesById.tailMap(prevId + 1);
+    for (Entry<Long, PathBasedCacheEntry> cur : tailMap.entrySet()) {
       if (numReplies >= maxListCacheDirectivesResponses) {
-        return new BatchedListEntries<PathCacheEntry>(replies, true);
+        return new BatchedListEntries<PathBasedCacheEntry>(replies, true);
       }
-      PathCacheEntry curEntry = cur.getValue();
-      if (!filterPool.isEmpty() && 
-          !cur.getValue().getDirective().getPool().equals(filterPool)) {
+      PathBasedCacheEntry curEntry = cur.getValue();
+      PathBasedCacheDirective directive = cur.getValue().getDirective();
+      if (filterPool != null && 
+          !directive.getPool().equals(filterPool)) {
+        continue;
+      }
+      if (filterPath != null &&
+          !directive.getPath().equals(filterPath)) {
         continue;
       }
       CachePool pool = cachePools.get(curEntry.getDirective().getPool());
       if (pool == null) {
-        LOG.error("invalid pool for PathCacheEntry " + curEntry);
+        LOG.error("invalid pool for PathBasedCacheEntry " + curEntry);
         continue;
       }
-      if (pc.checkPermission(pool, FsAction.EXECUTE)) {
+      if (pc.checkPermission(pool, FsAction.READ)) {
         replies.add(cur.getValue());
         numReplies++;
       }
     }
-    return new BatchedListEntries<PathCacheEntry>(replies, false);
+    return new BatchedListEntries<PathBasedCacheEntry>(replies, false);
   }
 
   /**
@@ -300,8 +312,7 @@ final class CacheManager {
     }
     if (info.getMode() != null) {
       pool.setMode(info.getMode());
-      bld.append(prefix).
-        append(String.format("set mode to 0%3o", info.getMode()));
+      bld.append(prefix).append("set mode to " + info.getMode());
       prefix = "; ";
     }
     if (info.getWeight() != null) {
@@ -334,10 +345,10 @@ final class CacheManager {
     // Remove entries using this pool
     // TODO: could optimize this somewhat to avoid the need to iterate
     // over all entries in entriesByDirective
-    Iterator<Entry<PathCacheDirective, PathCacheEntry>> iter = 
+    Iterator<Entry<PathBasedCacheDirective, PathBasedCacheEntry>> iter = 
         entriesByDirective.entrySet().iterator();
     while (iter.hasNext()) {
-      Entry<PathCacheDirective, PathCacheEntry> entry = iter.next();
+      Entry<PathBasedCacheDirective, PathBasedCacheEntry> entry = iter.next();
       if (entry.getKey().getPool().equals(poolName)) {
         entriesById.remove(entry.getValue().getEntryId());
         iter.remove();
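
Taken together, addDirective/addDirectives report per-directive outcomes through
Fallible, so one batch call can succeed for some directives and fail for others.
A caller-side sketch of unwrapping those results; it assumes a CacheManager
instance 'cacheManager' and an FSPermissionChecker 'pc' are in scope, and is
illustrative rather than part of the patch:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
    import org.apache.hadoop.util.Fallible;

    // The second directive targets a pool that may not exist, so its
    // Fallible would hold an InvalidPoolNameError instead of an entry.
    List<PathBasedCacheDirective> directives = Arrays.asList(
        new PathBasedCacheDirective("/alpha", "pool1"),
        new PathBasedCacheDirective("/beta", "nonexistent_pool"));
    List<Fallible<PathBasedCacheEntry>> results =
        cacheManager.addDirectives(directives, pc);
    for (int i = 0; i < results.size(); i++) {
      try {
        // get() returns the entry, or surfaces the stored error as an
        // IOException whose cause identifies what went wrong.
        PathBasedCacheEntry entry = results.get(i).get();
        System.out.println("added entry " + entry.getEntryId());
      } catch (IOException e) {
        System.err.println("directive " + directives.get(i) +
            " failed: " + e.getMessage());
      }
    }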

+ 9 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java

@@ -51,6 +51,14 @@ public final class CachePool {
   @Nonnull
   private String groupName;
   
+  /**
+   * Cache pool permissions.
+   * 
+   * READ permission means that you can list the cache directives in this pool.
+   * WRITE permission means that you can add, remove, or modify cache directives
+   *       in this pool.
+   * EXECUTE permission is unused.
+   */
   @Nonnull
   private FsPermission mode;
   
@@ -74,7 +82,7 @@ public final class CachePool {
       }
       this.groupName = ugi.getPrimaryGroupName();
     } else {
-      this.groupName = ownerName;
+      this.groupName = groupName;
     }
     this.mode = mode != null ? 
         new FsPermission(mode): FsPermission.getCachePoolDefault();
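
Given the mode semantics documented above (READ to list directives; WRITE to
add, remove, or modify them; EXECUTE unused), a pool created with mode 0755
lets any user list its contents while only the owner may change them. A small
sketch using the builder-style CachePoolInfo setters that also appear in the
tests further down:

    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

    // Owner: rwx (list and modify).  Group and others: r-x, which for a
    // cache pool means "may list directives, may not modify them".
    CachePoolInfo info = new CachePoolInfo("pool1")
        .setOwnerName("abc")
        .setGroupName("123")
        .setMode(new FsPermission((short)0755))
        .setWeight(150);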

+ 18 - 18
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -143,8 +143,8 @@ import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -6750,27 +6750,27 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
   }
 
   @SuppressWarnings("unchecked")
-  List<Fallible<PathCacheEntry>> addPathCacheDirectives(
-      List<PathCacheDirective> directives) throws IOException {
+  List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
+      List<PathBasedCacheDirective> directives) throws IOException {
     CacheEntryWithPayload retryCacheEntry =
         RetryCache.waitForCompletion(retryCache, null);
     if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
-      return (List<Fallible<PathCacheEntry>>) retryCacheEntry.getPayload();
+      return (List<Fallible<PathBasedCacheEntry>>) retryCacheEntry.getPayload();
     }
     final FSPermissionChecker pc = isPermissionEnabled ?
         getPermissionChecker() : null;
     boolean success = false;
-    List<Fallible<PathCacheEntry>> results = null;
+    List<Fallible<PathBasedCacheEntry>> results = null;
     checkOperation(OperationCategory.WRITE);
     writeLock();
     try {
       checkOperation(OperationCategory.WRITE);
       if (isInSafeMode()) {
         throw new SafeModeException(
-            "Cannot add path cache directive", safeMode);
+            "Cannot add PathBasedCache directive", safeMode);
       }
       results = cacheManager.addDirectives(directives, pc);
-      //getEditLog().logAddPathCacheDirectives(results); FIXME: HDFS-5119
+      //getEditLog().logAddPathBasedCacheDirectives(results); FIXME: HDFS-5119
       success = true;
     } finally {
       writeUnlock();
@@ -6778,7 +6778,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
         getEditLog().logSync();
       }
       if (isAuditEnabled() && isExternalInvocation()) {
-        logAuditEvent(success, "addPathCacheDirectives", null, null, null);
+        logAuditEvent(success, "addPathBasedCacheDirectives", null, null, null);
       }
       RetryCache.setState(retryCacheEntry, success, results);
     }
@@ -6786,7 +6786,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
   }
 
   @SuppressWarnings("unchecked")
-  List<Fallible<Long>> removePathCacheEntries(List<Long> ids) throws IOException {
+  List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids) throws IOException {
     CacheEntryWithPayload retryCacheEntry =
         RetryCache.waitForCompletion(retryCache, null);
     if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
@@ -6802,15 +6802,15 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       checkOperation(OperationCategory.WRITE);
       if (isInSafeMode()) {
         throw new SafeModeException(
-            "Cannot remove path cache directives", safeMode);
+            "Cannot remove PathBasedCache directives", safeMode);
       }
       results = cacheManager.removeEntries(ids, pc);
-      //getEditLog().logRemovePathCacheEntries(results); FIXME: HDFS-5119
+      //getEditLog().logRemovePathBasedCacheEntries(results); FIXME: HDFS-5119
       success = true;
     } finally {
       writeUnlock();
       if (isAuditEnabled() && isExternalInvocation()) {
-        logAuditEvent(success, "removePathCacheEntries", null, null, null);
+        logAuditEvent(success, "removePathBasedCacheEntries", null, null, null);
       }
       RetryCache.setState(retryCacheEntry, success, results);
     }
@@ -6818,22 +6818,22 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     return results;
   }
 
-  BatchedListEntries<PathCacheEntry> listPathCacheEntries(long startId,
-      String pool) throws IOException {
+  BatchedListEntries<PathBasedCacheEntry> listPathBasedCacheEntries(long startId,
+      String pool, String path) throws IOException {
     final FSPermissionChecker pc = isPermissionEnabled ?
         getPermissionChecker() : null;
-    BatchedListEntries<PathCacheEntry> results;
+    BatchedListEntries<PathBasedCacheEntry> results;
     checkOperation(OperationCategory.READ);
     readLock();
     boolean success = false;
     try {
       checkOperation(OperationCategory.READ);
-      results = cacheManager.listPathCacheEntries(startId, pool, pc);
+      results = cacheManager.listPathBasedCacheEntries(startId, pool, path, pc);
       success = true;
     } finally {
       readUnlock();
       if (isAuditEnabled() && isExternalInvocation()) {
-        logAuditEvent(success, "listPathCacheEntries", null, null, null);
+        logAuditEvent(success, "listPathBasedCacheEntries", null, null, null);
       }
     }
     return results;
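
Both mutating operations above use the same RetryCache idiom: if a retried RPC
already completed, the namesystem replays the recorded payload instead of
executing the operation twice. Condensed to its skeleton (names as in the diff;
locking, safe-mode checks, and audit logging omitted, so this is a sketch, not
a drop-in method body):

    CacheEntryWithPayload retryCacheEntry =
        RetryCache.waitForCompletion(retryCache, null);
    if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
      // A client retry of an already-successful RPC: hand back the
      // recorded result rather than re-executing.
      return (List<Fallible<PathBasedCacheEntry>>) retryCacheEntry.getPayload();
    }
    boolean success = false;
    List<Fallible<PathBasedCacheEntry>> results = null;
    try {
      results = cacheManager.addDirectives(directives, pc);
      success = true;
    } finally {
      // Record the outcome so any later retry observes the same answer.
      RetryCache.setState(retryCacheEntry, success, results);
    }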

+ 20 - 16
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java

@@ -62,8 +62,8 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -1211,43 +1211,47 @@ class NameNodeRpcServer implements NamenodeProtocols {
   }
 
   @Override
-  public List<Fallible<PathCacheEntry>> addPathCacheDirectives(
-      List<PathCacheDirective> paths) throws IOException {
-    return namesystem.addPathCacheDirectives(paths);
+  public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
+      List<PathBasedCacheDirective> paths) throws IOException {
+    return namesystem.addPathBasedCacheDirectives(paths);
   }
 
   @Override
-  public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
+  public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
       throws IOException {
-    return namesystem.removePathCacheEntries(ids);
+    return namesystem.removePathBasedCacheEntries(ids);
   }
 
-  private class ServerSidePathCacheEntriesIterator
-      extends BatchedRemoteIterator<Long, PathCacheEntry> {
+  private class ServerSidePathBasedCacheEntriesIterator
+      extends BatchedRemoteIterator<Long, PathBasedCacheEntry> {
 
     private final String pool;
 
-    public ServerSidePathCacheEntriesIterator(Long firstKey, String pool) {
+    private final String path;
+
+    public ServerSidePathBasedCacheEntriesIterator(Long firstKey, String pool,
+        String path) {
       super(firstKey);
       this.pool = pool;
+      this.path = path;
     }
 
     @Override
-    public BatchedEntries<PathCacheEntry> makeRequest(
+    public BatchedEntries<PathBasedCacheEntry> makeRequest(
         Long nextKey) throws IOException {
-      return namesystem.listPathCacheEntries(nextKey, pool);
+      return namesystem.listPathBasedCacheEntries(nextKey, pool, path);
     }
 
     @Override
-    public Long elementToPrevKey(PathCacheEntry entry) {
+    public Long elementToPrevKey(PathBasedCacheEntry entry) {
       return entry.getEntryId();
     }
   }
   
   @Override
-  public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
-      String pool) throws IOException {
-    return new ServerSidePathCacheEntriesIterator(prevId, pool);
+  public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
+      String pool, String path) throws IOException {
+    return new ServerSidePathBasedCacheEntriesIterator(prevId, pool, path);
   }
 
   @Override
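
The iterator above exists so that listing can be resumed across RPCs: each
batch request carries the ID of the last entry already returned (via
elementToPrevKey), and makeRequest fetches the page that starts after it. On
the client side the paging is hidden behind a RemoteIterator; a sketch,
assuming a DistributedFileSystem handle named dfs:

    import org.apache.hadoop.fs.RemoteIterator;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;

    // List every cached path in pool1; new batches are fetched lazily
    // as hasNext() crosses a page boundary.
    RemoteIterator<PathBasedCacheEntry> iter =
        dfs.listPathBasedCacheEntries("pool1", null);  // pool, path filters
    while (iter.hasNext()) {
      PathBasedCacheEntry entry = iter.next();
      System.out.println(entry.getEntryId() + "\t" +
          entry.getDirective().getPath());
    }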

+ 333 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java

@@ -0,0 +1,333 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.tools.TableListing.Justification;
+import org.apache.hadoop.util.Fallible;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * This class implements command-line operations on the HDFS Cache.
+ */
+@InterfaceAudience.Private
+public class CacheAdmin {
+  private static Configuration conf = new Configuration();
+
+  private static DistributedFileSystem getDFS() throws IOException {
+    FileSystem fs = FileSystem.get(conf);
+    if (!(fs instanceof DistributedFileSystem)) {
+      throw new IllegalArgumentException("FileSystem " + fs.getUri() +
+          " is not an HDFS file system");
+    }
+    return (DistributedFileSystem)fs;
+  }
+
+  interface Command {
+    String getName();
+    String getShortUsage();
+    String getLongUsage();
+    int run(List<String> args) throws IOException;
+  }
+
+  private static class AddPathBasedCacheDirectiveCommand implements Command {
+    @Override
+    public String getName() {
+      return "-addPath";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-addPath -path <path> -pool <pool-name>]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "Adds a new PathBasedCache directive.\n" +
+        "<path>  The new path to cache.\n" + 
+        "        Paths may be either directories or files.\n" +
+        "<pool-name> The pool which this directive will reside in.\n" + 
+        "        You must have write permission on the cache pool in order\n" +
+        "        to add new entries to it.\n";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      String path = StringUtils.popOptionWithArgument("-path", args);
+      if (path == null) {
+        System.err.println("You must specify a path with -path.");
+        return 1;
+      }
+      String poolName = StringUtils.popOptionWithArgument("-pool", args);
+      if (poolName == null) {
+        System.err.println("You must specify a pool name with -pool.");
+        return 1;
+      }
+      if (!args.isEmpty()) {
+        System.err.println("Can't understand argument: " + args.get(0));
+        return 1;
+      }
+        
+      DistributedFileSystem dfs = getDFS();
+      List<PathBasedCacheDirective> directives =
+          new LinkedList<PathBasedCacheDirective>();
+      PathBasedCacheDirective directive = new PathBasedCacheDirective(path, poolName);
+      directives.add(directive);
+      List<Fallible<PathBasedCacheEntry>> results =
+          dfs.addPathBasedCacheDirective(directives);
+      try {
+        PathBasedCacheEntry entry = results.get(0).get();
+        System.out.println("Added PathBasedCache entry " + entry.getEntryId());
+        return 0;
+      } catch (IOException e) {
+        System.err.println("Error adding cache directive " + directive + ": " +
+          e.getMessage());
+        return 1;
+      }
+    }
+  }
+
+  private static class RemovePathBasedCacheDirectiveCommand implements Command {
+    @Override
+    public String getName() {
+      return "-removePath";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-removePath <id>]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "Remove a cache directive.\n" +
+        "<id>    The id of the cache directive to remove.\n" + 
+        "        You must have write permission on the pool where the\n" +
+        "        directive resides in order to remove it.  To see a list\n" +
+        "        of PathBasedCache directive IDs, use the -list command.\n";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      String idString = StringUtils.popFirstNonOption(args);
+      if (idString == null) {
+        System.err.println("You must specify a directive ID to remove.");
+        return 1;
+      }
+      long id = Long.valueOf(idString);
+      if (id <= 0) {
+        System.err.println("Invalid directive ID " + id + ": ids must " +
+            "be greater than 0.");
+        return 1;
+      }
+      if (!args.isEmpty()) {
+        System.err.println("Can't understand argument: " + args.get(0));
+        return 1;
+      }
+      DistributedFileSystem dfs = getDFS();
+      List<Long> ids = new LinkedList<Long>();
+      ids.add(id);
+      List<Fallible<Long>> results = dfs.removePathBasedCacheEntries(ids);
+      try {
+        Long resultId = results.get(0).get();
+        System.out.println("Removed PathBasedCache entry " + resultId);
+        return 0;
+      } catch (IOException e) {
+        System.err.println("Error removing cache directive " + id + ": " +
+          e.getMessage());
+        return 1;
+      }
+    }
+  }
+
+  private static class ListPathBasedCacheDirectiveCommand implements Command {
+    @Override
+    public String getName() {
+      return "-listPaths";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-listPaths [-path <path>] [-pool <pool-name>]]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "List PathBasedCache directives.\n" +
+        "<path> If a -path argument is given, we will list only\n" +
+        "        PathBasedCache entries with this path.\n" +
+        "        Note that if there is a PathBasedCache directive for <path>\n" +
+        "        in a cache pool that we don't have read access for, it\n" + 
+        "        not be listed.  If there are unreadable cache pools, a\n" +
+        "        message will be printed.\n" +
+        "        may be incomplete.\n" +
+        "<pool-name> If a -pool argument is given, we will list only path\n" +
+        "        cache entries in that pool.\n";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      String pathFilter = StringUtils.popOptionWithArgument("-path", args);
+      String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
+      if (!args.isEmpty()) {
+        System.err.println("Can't understand argument: " + args.get(0));
+        return 1;
+      }
+      TableListing tableListing = new TableListing.Builder().
+          addField("ID", Justification.RIGHT).
+          addField("POOL", Justification.LEFT).
+          addField("PATH", Justification.LEFT).
+          build();
+      DistributedFileSystem dfs = getDFS();
+      RemoteIterator<PathBasedCacheEntry> iter =
+          dfs.listPathBasedCacheEntries(poolFilter, pathFilter);
+      int numEntries = 0;
+      while (iter.hasNext()) {
+        PathBasedCacheEntry entry = iter.next();
+        String row[] = new String[] {
+            "" + entry.getEntryId(),
+            entry.getDirective().getPool(),
+            entry.getDirective().getPath(),
+        };
+        tableListing.addRow(row);
+        numEntries++;
+      }
+      System.out.print(String.format("Found %d entr%s\n",
+          numEntries, numEntries == 1 ? "y" : "ies"));
+      if (numEntries > 0) {
+        System.out.print(tableListing.build());
+      }
+      return 0;
+    }
+  }
+
+  private static class HelpCommand implements Command {
+    @Override
+    public String getName() {
+      return "-help";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-help <command-name>]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "Get detailed help about a command.\n" +
+        "<command-name> The command to get detailed help for.  If no " +
+        "        command-name is specified, we will print detailed help " +
+        "        about all commands";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      if (args.size() == 0) {
+        for (Command command : COMMANDS) {
+          System.err.println(command.getLongUsage());
+        }
+        return 0;
+      }
+      if (args.size() != 1) {
+        System.out.println("You must give exactly one argument to -help.");
+        return 0;
+      }
+      String commandName = args.get(0);
+      commandName = commandName.replaceAll("^[-]*", "");
+      Command command = determineCommand(commandName);
+      if (command == null) {
+        System.err.print("Sorry, I don't know the command '" +
+          commandName + "'.\n");
+        System.err.print("Valid command names are:\n");
+        String separator = "";
+        for (Command c : COMMANDS) {
+          System.err.print(separator + c.getName());
+          separator = ", ";
+        }
+        return 1;
+      }
+      System.err.print(command.getLongUsage());
+      return 0;
+    }
+  }
+
+  private static Command[] COMMANDS = {
+    new AddPathBasedCacheDirectiveCommand(),
+    new RemovePathBasedCacheDirectiveCommand(),
+    new ListPathBasedCacheDirectiveCommand(),
+    new HelpCommand(),
+  };
+
+  private static void printUsage(boolean longUsage) {
+    System.err.println(
+        "Usage: bin/hdfs cacheadmin [COMMAND]");
+    for (Command command : COMMANDS) {
+      if (longUsage) {
+        System.err.print(command.getLongUsage());
+      } else {
+        System.err.print("          " + command.getShortUsage());
+      }
+    }
+    System.err.println();
+  }
+
+  private static Command determineCommand(String commandName) {
+    for (int i = 0; i < COMMANDS.length; i++) {
+      if (COMMANDS[i].getName().equals(commandName)) {
+        return COMMANDS[i];
+      }
+    }
+    return null;
+  }
+
+  public static void main(String[] argsArray) throws IOException {
+    if (argsArray.length == 0) {
+      printUsage(false);
+      System.exit(1);
+    }
+    Command command = determineCommand(argsArray[0]);
+    if (command == null) {
+      System.err.println("Can't understand command '" + argsArray[0] + "'");
+      if (!argsArray[0].startsWith("-")) {
+        System.err.println("Command names must start with dashes.");
+      }
+      printUsage(false);
+      System.exit(1);
+    }
+    List<String> args = new LinkedList<String>();
+    for (int j = 1; j < argsArray.length; j++) {
+      args.add(argsArray[j]);
+    }
+    System.exit(command.run(args));
+  }
+}
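
Putting the subcommands together, a session against a running cluster would
look roughly like this (pool and path names are illustrative, the pool must
already exist with write permission for the caller, and output spacing is
approximate):

    $ hdfs cacheadmin -addPath -path /datasets/alpha -pool pool1
    Added PathBasedCache entry 1
    $ hdfs cacheadmin -listPaths -pool pool1
    Found 1 entry
     ID POOL  PATH
      1 pool1 /datasets/alpha
    $ hdfs cacheadmin -removePath 1
    Removed PathBasedCache entry 1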

+ 29 - 32
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java

@@ -57,6 +57,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
+import org.apache.hadoop.hdfs.tools.TableListing.Justification;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
@@ -634,15 +635,6 @@ public class DFSAdmin extends FsShell {
   final private static String LIST_CACHE_POOLS_USAGE =
       "-listCachePools] [-verbose] [name]";
 
-  private void listCachePool(CachePoolInfo info) {
-    System.out.print(String.format("%s\n", info.getPoolName()));
-    System.out.print(String.format("owner:\t%s\n", info.getOwnerName()));
-    System.out.print(String.format("group:\t%s\n", info.getGroupName()));
-    System.out.print(String.format("mode:\t%s\n", info.getMode()));
-    System.out.print(String.format("weight:\t%d\n", info.getWeight()));
-    System.out.print("\n");
-  }
-
   public int listCachePools(String argsArray[], int idx) throws IOException {
     List<String> args = new LinkedList<String>();
     for (int i = idx; i < argsArray.length; i++) {
@@ -655,39 +647,44 @@ public class DFSAdmin extends FsShell {
       System.err.println("usage is " + LIST_CACHE_POOLS_USAGE);
       return 1;
     }
-    boolean gotResults = false;
     DistributedFileSystem dfs = getDFS();
+    TableListing listing = new TableListing.Builder().
+        addField("NAME", Justification.LEFT).
+        addField("OWNER", Justification.LEFT).
+        addField("GROUP", Justification.LEFT).
+        addField("MODE", Justification.LEFT).
+        addField("WEIGHT", Justification.RIGHT).
+        build();
+    int numResults = 0;
     try {
       RemoteIterator<CachePoolInfo> iter = dfs.listCachePools();
-      if (name != null) {
-        while (iter.hasNext()) {
-          CachePoolInfo info = iter.next();
-          if (info.getPoolName().equals(name)) {
-            listCachePool(info);
-            gotResults = true;
-            return 0;
+      while (iter.hasNext()) {
+        CachePoolInfo info = iter.next();
+        if (name == null || info.getPoolName().equals(name)) {
+          listing.addRow(new String[] {
+              info.getPoolName(),
+              info.getOwnerName(),
+              info.getGroupName(),
+              info.getMode().toString(),
+              info.getWeight().toString(),
+          });
+          ++numResults;
+          if (name != null) {
+            break;
           }
         }
-      } else {
-        while (iter.hasNext()) {
-          listCachePool(iter.next());
-          gotResults = true;
-        }
       }
     } catch (IOException e) {
       throw new RemoteException(e.getClass().getName(), e.getMessage());
     }
-    int ret = 0;
-    if (!gotResults) {
-      if (name != null) {
-        System.out.println("No cache pool named " + name + " found.");
-        ret = 1;
-      } else {
-        System.out.println("No cache pools found.");
-        ret = 1;
-      }
+    System.out.print(String.format("Found %d result%s.\n", numResults,
+        (numResults == 1 ? "" : "s")));
+    if (numResults > 0) { 
+      System.out.print(listing.build());
     }
-    return ret;
+    // If there are no results, we return 1 (failure exit code);
+    // otherwise we return 0 (success exit code).
+    return (numResults == 0) ? 1 : 0;
   }
 
   public int rollEdits() throws IOException {
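
With TableListing in place, -listCachePools prints a result count followed by
one aligned table instead of a multi-line stanza per pool. Illustratively,
with a single pool matching the expected output in the updated
testHDFSConf.xml below (pool name hypothetical, spacing approximate):

    $ hdfs dfsadmin -listCachePools
    Found 1 result.
    NAME  OWNER GROUP MODE      WEIGHT
    pool1 bob   bob   rw-rw-r--    100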

+ 137 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java

@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools;
+
+import java.util.LinkedList;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * This class implements a "table listing" with column headers.
+ */
+@InterfaceAudience.Private
+public class TableListing {
+  public enum Justification {
+    LEFT,
+    RIGHT;
+  }
+
+  private static class Column {
+    private final LinkedList<String> rows;
+    private final Justification justification;
+    private int maxLength;
+
+    Column(String title, Justification justification) {
+      this.rows = new LinkedList<String>();
+      this.justification = justification;
+      this.maxLength = 0;
+      addRow(title);
+    }
+
+    private void addRow(String val) {
+      if ((val.length() + 1) > maxLength) {
+        maxLength = val.length() + 1;
+      }
+      rows.add(val);
+    }
+
+    String getRow(int i) {
+      String raw = rows.get(i);
+      int paddingLength = maxLength - raw.length();
+      String padding = (paddingLength <= 0) ? "" :
+        StringUtils.repeat(" ", paddingLength);
+      if (justification == Justification.LEFT) {
+        return raw + padding;
+      } else {
+        return padding + raw;
+      }
+    }
+  }
+
+  public static class Builder {
+    private final LinkedList<Column> columns = new LinkedList<Column>();
+
+    /**
+     * Create a new Builder.
+     */
+    public Builder() {
+    }
+
+    /**
+     * Add a new field to the Table under construction.
+     *
+     * @param title          Field title.
+     * @param justification  Whether the field is left or right justified.
+     * @return               this.
+     */
+    public Builder addField(String title, Justification justification) {
+      columns.add(new Column(title, justification));
+      return this;
+    }
+
+    /**
+     * Create a new TableListing.
+     */
+    public TableListing build() {
+      return new TableListing(columns.toArray(new Column[0]));
+    }
+  }
+
+  private final Column columns[];
+
+  private int numRows;
+
+  TableListing(Column columns[]) {
+    this.columns = columns;
+    this.numRows = 0;
+  }
+
+  /**
+   * Add a new row.
+   *
+   * @param row    The row of objects to add, one per column.
+   */
+  public void addRow(String row[]) {
+    if (row.length != columns.length) {
+      throw new RuntimeException("trying to add a row with " + row.length +
+            " columns, but we have " + columns.length + " columns.");
+    }
+    for (int i = 0; i < columns.length; i++) {
+      columns[i].addRow(row[i]);
+    }
+    numRows++;
+  }
+
+  /**
+   * Convert the table to a string.
+   */
+  public String build() {
+    StringBuilder builder = new StringBuilder();
+    for (int i = 0; i < numRows + 1; i++) {
+      String prefix = "";
+      for (int j = 0; j < columns.length; j++) {
+        builder.append(prefix);
+        prefix = " ";
+        builder.append(columns[j].getRow(i));
+      }
+      builder.append("\n");
+    }
+    return builder.toString();
+  }
+}
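
A self-contained usage sketch of TableListing, mirroring how CacheAdmin and
DFSAdmin build their listings above:

    import org.apache.hadoop.hdfs.tools.TableListing;
    import org.apache.hadoop.hdfs.tools.TableListing.Justification;

    public class TableListingExample {
      public static void main(String[] args) {
        TableListing listing = new TableListing.Builder()
            .addField("ID", Justification.RIGHT)
            .addField("POOL", Justification.LEFT)
            .addField("PATH", Justification.LEFT)
            .build();
        listing.addRow(new String[] { "1", "pool1", "/alpha" });
        listing.addRow(new String[] { "42", "pool2", "/beta" });
        // build() renders the header plus both rows, each column padded
        // to its widest cell and justified as declared above.
        System.out.print(listing.build());
      }
    }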

+ 32 - 30
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto

@@ -363,54 +363,56 @@ message IsFileClosedResponseProto {
   required bool result = 1;
 }
 
-message PathCacheDirectiveProto {
+message PathBasedCacheDirectiveProto {
   required string path = 1;
   required string pool = 2;
 }
 
-message AddPathCacheDirectivesRequestProto {
-  repeated PathCacheDirectiveProto elements = 1;
+message AddPathBasedCacheDirectivesRequestProto {
+  repeated PathBasedCacheDirectiveProto elements = 1;
 }
 
-message AddPathCacheDirectivesResponseProto {
+message AddPathBasedCacheDirectivesResponseProto {
   repeated int64 results = 1 [packed=true];
 }
 
-enum AddPathCacheDirectiveErrorProto {
-  EMPTY_PATH_ERROR = -1;
-  INVALID_PATH_NAME_ERROR = -2;
-  INVALID_POOL_NAME_ERROR = -3;
-  UNEXPECTED_ADD_ERROR = -4;
+enum AddPathBasedCacheDirectiveErrorProto {
+  UNEXPECTED_ADD_ERROR = -1;
+  EMPTY_PATH_ERROR = -2;
+  INVALID_PATH_NAME_ERROR = -3;
+  INVALID_POOL_NAME_ERROR = -4;
+  ADD_PERMISSION_DENIED_ERROR = -5;
 }
 
-message RemovePathCacheEntriesRequestProto {
+message RemovePathBasedCacheEntriesRequestProto {
   repeated int64 elements = 1 [packed=true];
 }
 
-message RemovePathCacheEntriesResponseProto {
+message RemovePathBasedCacheEntriesResponseProto {
   repeated int64 results = 1 [packed=true];
 }
 
-enum RemovePathCacheEntryErrorProto {
-  INVALID_CACHED_PATH_ID_ERROR = -1;
-  NO_SUCH_CACHED_PATH_ID_ERROR = -2;
-  REMOVE_PERMISSION_DENIED_ERROR = -3;
-  UNEXPECTED_REMOVE_ERROR = -4;
+enum RemovePathBasedCacheEntryErrorProto {
+  UNEXPECTED_REMOVE_ERROR = -1;
+  INVALID_CACHED_PATH_ID_ERROR = -2;
+  NO_SUCH_CACHED_PATH_ID_ERROR = -3;
+  REMOVE_PERMISSION_DENIED_ERROR = -4;
 }
 
-message ListPathCacheEntriesRequestProto {
+message ListPathBasedCacheEntriesRequestProto {
   required int64 prevId = 1;
-  required string pool = 2;
+  optional string pool = 2;
+  optional string path = 3;
 }
 
-message ListPathCacheEntriesElementProto {
+message ListPathBasedCacheEntriesElementProto {
   required int64 id = 1;
-  required string path = 2;
-  required string pool = 3;
+  required string pool = 2;
+  required string path = 3;
 }
 
-message ListPathCacheEntriesResponseProto {
-  repeated ListPathCacheEntriesElementProto elements = 1;
+message ListPathBasedCacheEntriesResponseProto {
+  repeated ListPathBasedCacheEntriesElementProto elements = 1;
   required bool hasMore = 2;
 }
 
@@ -449,7 +451,7 @@ message ListCachePoolsRequestProto {
 
 message ListCachePoolsResponseProto {
   repeated ListCachePoolsResponseElementProto elements = 1;
-  optional bool hasMore = 2;
+  required bool hasMore = 2;
 }
 
 message ListCachePoolsResponseElementProto {
@@ -641,12 +643,12 @@ service ClientNamenodeProtocol {
       returns(ListCorruptFileBlocksResponseProto);
   rpc metaSave(MetaSaveRequestProto) returns(MetaSaveResponseProto);
   rpc getFileInfo(GetFileInfoRequestProto) returns(GetFileInfoResponseProto);
-  rpc addPathCacheDirectives(AddPathCacheDirectivesRequestProto)
-      returns (AddPathCacheDirectivesResponseProto);
-  rpc removePathCacheEntries(RemovePathCacheEntriesRequestProto)
-      returns (RemovePathCacheEntriesResponseProto);
-  rpc listPathCacheEntries(ListPathCacheEntriesRequestProto)
-      returns (ListPathCacheEntriesResponseProto);
+  rpc addPathBasedCacheDirectives(AddPathBasedCacheDirectivesRequestProto)
+      returns (AddPathBasedCacheDirectivesResponseProto);
+  rpc removePathBasedCacheEntries(RemovePathBasedCacheEntriesRequestProto)
+      returns (RemovePathBasedCacheEntriesResponseProto);
+  rpc listPathBasedCacheEntries(ListPathBasedCacheEntriesRequestProto)
+      returns (ListPathBasedCacheEntriesResponseProto);
   rpc addCachePool(AddCachePoolRequestProto)
       returns(AddCachePoolResponseProto);
   rpc modifyCachePool(ModifyCachePoolRequestProto)
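
Because pool and path are now optional in ListPathBasedCacheEntriesRequestProto,
a client sets only the filters it wants, and the server can use
hasPool()/hasPath() to distinguish "unset" from an empty string. A sketch using
the standard protobuf-java generated builders (the outer
ClientNamenodeProtocolProtos class name follows the usual HDFS codegen
convention and is assumed here):

    import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;

    // Start from the beginning (prevId = 0) and filter by pool only;
    // leaving 'path' unset matches directives for any path.
    ListPathBasedCacheEntriesRequestProto req =
        ListPathBasedCacheEntriesRequestProto.newBuilder()
            .setPrevId(0)
            .setPool("pool1")
            .build();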

+ 60 - 46
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java → hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java

@@ -34,23 +34,23 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.PoolWritePermissionDeniedError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Fallible;
 import org.junit.Test;
 
-public class TestPathCacheRequests {
-  static final Log LOG = LogFactory.getLog(TestPathCacheRequests.class);
+public class TestPathBasedCacheRequests {
+  static final Log LOG = LogFactory.getLog(TestPathBasedCacheRequests.class);
 
   private static final UserGroupInformation unprivilegedUser =
       UserGroupInformation.createRemoteUser("unprivilegedUser");
@@ -101,11 +101,16 @@ public class TestPathCacheRequests {
     proto.addCachePool(new CachePoolInfo("pool1").
         setOwnerName("abc").setGroupName("123").
         setMode(new FsPermission((short)0755)).setWeight(150));
-    proto.modifyCachePool(new CachePoolInfo("pool1").
-        setOwnerName("def").setGroupName("456"));
     RemoteIterator<CachePoolInfo> iter = proto.listCachePools("");
     CachePoolInfo info = iter.next();
     assertEquals("pool1", info.getPoolName());
+    assertEquals("abc", info.getOwnerName());
+    assertEquals("123", info.getGroupName());
+    proto.modifyCachePool(new CachePoolInfo("pool1").
+        setOwnerName("def").setGroupName("456"));
+    iter = proto.listCachePools("");
+    info = iter.next();
+    assertEquals("pool1", info.getPoolName());
     assertEquals("def", info.getOwnerName());
     assertEquals("456", info.getGroupName());
     assertEquals(new FsPermission((short)0755), info.getMode());
@@ -127,16 +132,16 @@ public class TestPathCacheRequests {
   }
 
   private static void validateListAll(
-      RemoteIterator<PathCacheEntry> iter,
+      RemoteIterator<PathBasedCacheEntry> iter,
       long id0, long id1, long id2) throws Exception {
-    Assert.assertEquals(new PathCacheEntry(id0,
-        new PathCacheDirective("/alpha", "pool1")),
+    Assert.assertEquals(new PathBasedCacheEntry(id0,
+        new PathBasedCacheDirective("/alpha", "pool1")),
         iter.next());
-    Assert.assertEquals(new PathCacheEntry(id1,
-        new PathCacheDirective("/beta", "pool2")),
+    Assert.assertEquals(new PathBasedCacheEntry(id1,
+        new PathBasedCacheDirective("/beta", "pool2")),
         iter.next());
-    Assert.assertEquals(new PathCacheEntry(id2,
-        new PathCacheDirective("/gamma", "pool1")),
+    Assert.assertEquals(new PathBasedCacheEntry(id2,
+        new PathBasedCacheDirective("/gamma", "pool1")),
         iter.next());
     Assert.assertFalse(iter.hasNext());
   }
@@ -159,18 +164,19 @@ public class TestPathCacheRequests {
       proto.addCachePool(new CachePoolInfo("pool4").
           setMode(new FsPermission((short)0)));
 
-      List<Fallible<PathCacheEntry>> addResults1 = 
+      List<Fallible<PathBasedCacheEntry>> addResults1 = 
         unprivilegedUser.doAs(new PrivilegedExceptionAction<
-            List<Fallible<PathCacheEntry>>>() {
+            List<Fallible<PathBasedCacheEntry>>>() {
           @Override
-          public List<Fallible<PathCacheEntry>> run() throws IOException {
-            return proto.addPathCacheDirectives(Arrays.asList(
-              new PathCacheDirective[] {
-                new PathCacheDirective("/alpha", "pool1"),
-                new PathCacheDirective("/beta", "pool2"),
-                new PathCacheDirective("", "pool3"),
-                new PathCacheDirective("/zeta", "nonexistent_pool"),
-                new PathCacheDirective("/zeta", "pool4")
+          public List<Fallible<PathBasedCacheEntry>> run() throws IOException {
+            return proto.addPathBasedCacheDirectives(Arrays.asList(
+              new PathBasedCacheDirective[] {
+                new PathBasedCacheDirective("/alpha", "pool1"),
+                new PathBasedCacheDirective("/beta", "pool2"),
+                new PathBasedCacheDirective("", "pool3"),
+                new PathBasedCacheDirective("/zeta", "nonexistent_pool"),
+                new PathBasedCacheDirective("/zeta", "pool4"),
+                new PathBasedCacheDirective("//illegal/path/", "pool1")
               }));
             }
           });
@@ -197,28 +203,36 @@ public class TestPathCacheRequests {
         Assert.assertTrue(ioe.getCause()
             instanceof PoolWritePermissionDeniedError);
       }
+      try {
+        addResults1.get(5).get();
+        Assert.fail("expected an error when adding a malformed path " +
+            "to the cache directives.");
+      } catch (IOException ioe) {
+        // expected; the malformed path fails directive validation, so
+        // no specific cause type is asserted here
+      }
 
-      List<Fallible<PathCacheEntry>> addResults2 = 
-          proto.addPathCacheDirectives(Arrays.asList(
-            new PathCacheDirective[] {
-        new PathCacheDirective("/alpha", "pool1"),
-        new PathCacheDirective("/theta", ""),
-        new PathCacheDirective("bogus", "pool1"),
-        new PathCacheDirective("/gamma", "pool1")
+      List<Fallible<PathBasedCacheEntry>> addResults2 = 
+          proto.addPathBasedCacheDirectives(Arrays.asList(
+            new PathBasedCacheDirective[] {
+        new PathBasedCacheDirective("/alpha", "pool1"),
+        new PathBasedCacheDirective("/theta", ""),
+        new PathBasedCacheDirective("bogus", "pool1"),
+        new PathBasedCacheDirective("/gamma", "pool1")
       }));
       long id = addResults2.get(0).get().getEntryId();
       Assert.assertEquals("expected to get back the same ID as last time " +
-          "when re-adding an existing path cache directive.", ids1[0], id);
+          "when re-adding an existing PathBasedCache directive.", ids1[0], id);
       try {
         addResults2.get(1).get();
-        Assert.fail("expected an error when adding a path cache " +
+        Assert.fail("expected an error when adding a PathBasedCache " +
             "directive with an empty pool name.");
       } catch (IOException ioe) {
         Assert.assertTrue(ioe.getCause() instanceof InvalidPoolNameError);
       }
       try {
         addResults2.get(2).get();
-        Assert.fail("expected an error when adding a path cache " +
+        Assert.fail("expected an error when adding a PathBasedCache " +
             "directive with a non-absolute path name.");
       } catch (IOException ioe) {
         Assert.assertTrue(ioe.getCause() instanceof InvalidPathNameError);
@@ -226,20 +240,20 @@ public class TestPathCacheRequests {
       long ids2[] = new long[1];
       ids2[0] = addResults2.get(3).get().getEntryId();
 
-      RemoteIterator<PathCacheEntry> iter =
-          proto.listPathCacheEntries(0, "");
+      RemoteIterator<PathBasedCacheEntry> iter =
+          proto.listPathBasedCacheEntries(0, null, null);
       validateListAll(iter, ids1[0], ids1[1], ids2[0]);
-      iter = proto.listPathCacheEntries(0, "");
+      iter = proto.listPathBasedCacheEntries(0, null, null);
       validateListAll(iter, ids1[0], ids1[1], ids2[0]);
-      iter = proto.listPathCacheEntries(0, "pool3");
+      iter = proto.listPathBasedCacheEntries(0, "pool3", null);
       Assert.assertFalse(iter.hasNext());
-      iter = proto.listPathCacheEntries(0, "pool2");
+      iter = proto.listPathBasedCacheEntries(0, "pool2", null);
       Assert.assertEquals(addResults1.get(1).get(),
           iter.next());
       Assert.assertFalse(iter.hasNext());
 
       List<Fallible<Long>> removeResults1 = 
-          proto.removePathCacheEntries(Arrays.asList(
+          proto.removePathBasedCacheEntries(Arrays.asList(
             new Long[] { ids1[1], -42L, 999999L }));
       Assert.assertEquals(Long.valueOf(ids1[1]),
           removeResults1.get(0).get());
@@ -255,7 +269,7 @@ public class TestPathCacheRequests {
       } catch (IOException ioe) {
         Assert.assertTrue(ioe.getCause() instanceof NoSuchIdException);
       }
-      iter = proto.listPathCacheEntries(0, "pool2");
+      iter = proto.listPathBasedCacheEntries(0, "pool2", null);
       Assert.assertFalse(iter.hasNext());
     } finally {
       if (cluster != null) { cluster.shutdown(); }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml

@@ -16385,7 +16385,7 @@
       <comparators>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output>No cache pools found.</expected-output>
+          <expected-output>Found 0 results.</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -16434,7 +16434,7 @@
       <comparators>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output>foo</expected-output>
+          <expected-output>bob    bob    rw-rw-r--      100</expected-output>
         </comparator>
       </comparators>
     </test>