Browse Source

HADOOP-2839. Remove deprecated FileSystem::globPaths.
Contributed by lohit vijayarenu.



git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@644077 13f79535-47bb-0310-9956-ffa450edef68

Christopher Douglas 17 years ago
parent
commit
deebd1ab6d

+ 3 - 0
CHANGES.txt

@@ -71,6 +71,9 @@ Trunk (unreleased changes)
     HADOOP-2831. Remove deprecated o.a.h.dfs.INode::getAbsoluteName()
     (lohit vijayarenu via cdouglas)
 
+    HADOOP-2839. Remove deprecated FileSystem::globPaths.
+    (lohit vijayarenu via cdouglas)
+
   NEW FEATURES
 
     HADOOP-1398.  Add HBase in-memory block cache.  (tomwhite)

+ 0 - 22
src/java/org/apache/hadoop/fs/FileSystem.java

@@ -861,28 +861,6 @@ public abstract class FileSystem extends Configured implements Closeable {
     return results;
   }
 
-  /**
-   * glob all the path names that match filePattern using the default filter
-   */
-  @Deprecated
-  public Path[] globPaths(Path filePattern) throws IOException {
-    return globPaths(filePattern, DEFAULT_FILTER);
-  }
-
-  /**
-   * glob all the path names that match filePattern and is accepted by filter.
-   */
-  @Deprecated
-  public Path[] globPaths(Path filePattern, PathFilter filter)
-      throws IOException {
-    FileStatus[] stats = globStatus(filePattern, filter);
-    if (stats == null) {
-      return new Path[]{filePattern};
-    } else {
-      return FileUtil.stat2Paths(stats);
-    }
-  }
-
   /*
    * For a path of N components, return a list of paths that match the
    * components [<code>level</code>, <code>N-1</code>].

+ 16 - 0
src/java/org/apache/hadoop/fs/FileUtil.java

@@ -48,6 +48,22 @@ public class FileUtil {
     return ret;
   }
 
+  /**
+   * convert an array of FileStatus to an array of Path.
+   * If stats is null, return path
+   * @param stats
+   *          an array of FileStatus objects
+   * @param path
+   *          default path to return if stats is null
+   * @return an array of paths corresponding to the input
+   */
+  public static Path[] stat2Paths(FileStatus[] stats, Path path) {
+    if (stats == null)
+      return new Path[]{path};
+    else
+      return stat2Paths(stats);
+  }
+  
   /**
    * Delete a directory and all its contents.  If
    * we return false, the directory may be partially-deleted.

+ 32 - 28
src/java/org/apache/hadoop/fs/FsShell.java

@@ -168,7 +168,7 @@ public class FsShell extends Configured implements Tool {
    * @param argv: arguments
    * @param pos: Ignore everything before argv[pos]  
    * @exception: IOException  
-   * @see org.apache.hadoop.fs.FileSystem.globPaths 
+   * @see org.apache.hadoop.fs.FileSystem.globStatus 
    */
   void copyToLocal(String[]argv, int pos) throws IOException {
     CommandFormat cf = new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");
@@ -289,7 +289,7 @@ public class FsShell extends Configured implements Tool {
    * @param srcf: a file pattern specifying source files
    * @param dstf: a destination local file/directory 
    * @exception: IOException  
-   * @see org.apache.hadoop.fs.FileSystem.globPaths 
+   * @see org.apache.hadoop.fs.FileSystem.globStatus 
    */
   void copyMergeToLocal(String srcf, Path dst) throws IOException {
     copyMergeToLocal(srcf, dst, false);
@@ -307,12 +307,13 @@ public class FsShell extends Configured implements Tool {
    * @param dstf: a destination local file/directory
    * @param endline: if an end of line character is added to a text file 
    * @exception: IOException  
-   * @see org.apache.hadoop.fs.FileSystem.globPaths 
+   * @see org.apache.hadoop.fs.FileSystem.globStatus 
    */
   void copyMergeToLocal(String srcf, Path dst, boolean endline) throws IOException {
     Path srcPath = new Path(srcf);
     FileSystem srcFs = srcPath.getFileSystem(getConf());
-    Path [] srcs = srcFs.globPaths(new Path(srcf));
+    Path [] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath), 
+                                       srcPath);
     for(int i=0; i<srcs.length; i++) {
       if (endline) {
         FileUtil.copyMerge(srcFs, srcs[i], 
@@ -337,7 +338,7 @@ public class FsShell extends Configured implements Tool {
    * their content on stdout. 
    * @param srcf: a file pattern specifying source files
    * @exception: IOException
-   * @see org.apache.hadoop.fs.FileSystem.globPaths 
+   * @see org.apache.hadoop.fs.FileSystem.globStatus 
    */
   void cat(String src, boolean verifyChecksum) throws IOException {
     //cat behavior in Linux
@@ -546,14 +547,15 @@ public class FsShell extends Configured implements Tool {
    * @param srcf a file pattern specifying source files
    * @param recursive if need to set replication factor for files in subdirs
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void setReplication(short newRep, String srcf, boolean recursive,
                       List<Path> waitingList)
     throws IOException {
     Path srcPath = new Path(srcf);
     FileSystem srcFs = srcPath.getFileSystem(getConf());
-    Path[] srcs = srcFs.globPaths(new Path(srcf));
+    Path[] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath),
+                                      srcPath);
     for(int i=0; i<srcs.length; i++) {
       setReplication(newRep, srcFs, srcs[i], recursive, waitingList);
     }
@@ -608,7 +610,7 @@ public class FsShell extends Configured implements Tool {
    * @param srcf a file pattern specifying source files
    * @param recursive if need to list files in subdirs
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void ls(String srcf, boolean recursive) throws IOException {
     Path srcPath = new Path(srcf);
@@ -663,12 +665,14 @@ public class FsShell extends Configured implements Tool {
    * Show the size of all files that match the file pattern <i>src</i>
    * @param src a file pattern specifying source files
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void du(String src) throws IOException {
     Path srcPath = new Path(src);
     FileSystem srcFs = srcPath.getFileSystem(getConf());
-    FileStatus items[] = srcFs.listStatus(srcFs.globPaths(srcPath));
+    Path[] pathItems = FileUtil.stat2Paths(srcFs.globStatus(srcPath), 
+                                           srcPath);
+    FileStatus items[] = srcFs.listStatus(pathItems);
     if ((items == null) || ((items.length == 0) && 
         (!srcFs.exists(srcPath)))){
       throw new FileNotFoundException("Cannot access " + src
@@ -686,7 +690,7 @@ public class FsShell extends Configured implements Tool {
    * that matches the file pattern <i>src</i>
    * @param src a file pattern specifying source files
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void dus(String src) throws IOException {
     Path srcPath = new Path(src);
@@ -790,11 +794,10 @@ public class FsShell extends Configured implements Tool {
   void stat(char[] fmt, String src) throws IOException {
     Path srcPath = new Path(src);
     FileSystem srcFs = srcPath.getFileSystem(getConf());
-    Path glob[] = srcFs.globPaths(srcPath);
+    FileStatus glob[] = srcFs.globStatus(srcPath);
     if (null == glob)
       throw new IOException("cannot stat `" + src + "': No such file or directory");
-    for (Path f : glob) {
-      FileStatus st = srcFs.getFileStatus(f);
+    for (FileStatus f : glob) {
       StringBuilder buf = new StringBuilder();
       for (int i = 0; i < fmt.length; ++i) {
         if (fmt[i] != '%') {
@@ -803,25 +806,25 @@ public class FsShell extends Configured implements Tool {
           if (i + 1 == fmt.length) break;
           switch(fmt[++i]) {
             case 'b':
-              buf.append(st.getLen());
+              buf.append(f.getLen());
               break;
             case 'F':
-              buf.append(st.isDir() ? "directory" : "regular file");
+              buf.append(f.isDir() ? "directory" : "regular file");
               break;
             case 'n':
-              buf.append(f.getName());
+              buf.append(f.getPath().getName());
               break;
             case 'o':
-              buf.append(st.getBlockSize());
+              buf.append(f.getBlockSize());
               break;
             case 'r':
-              buf.append(st.getReplication());
+              buf.append(f.getReplication());
               break;
             case 'y':
-              buf.append(modifFmt.format(new Date(st.getModificationTime())));
+              buf.append(modifFmt.format(new Date(f.getModificationTime())));
               break;
             case 'Y':
-              buf.append(st.getModificationTime());
+              buf.append(f.getModificationTime());
               break;
             default:
               buf.append(fmt[i]);
@@ -841,7 +844,7 @@ public class FsShell extends Configured implements Tool {
    * @param srcf a file pattern specifying source files
    * @param dstf a destination local file/directory 
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void rename(String srcf, String dstf) throws IOException {
     Path srcPath = new Path(srcf);
@@ -853,7 +856,7 @@ public class FsShell extends Configured implements Tool {
     if (srcURI.compareTo(dstURI) != 0) {
       throw new IOException("src and destination filesystems do not match.");
     }
-    Path [] srcs = srcFs.globPaths(new Path(srcf));
+    Path[] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
     Path dst = new Path(dstf);
     if (srcs.length > 1 && !srcFs.isDirectory(dst)) {
       throw new IOException("When moving multiple files, " 
@@ -953,14 +956,14 @@ public class FsShell extends Configured implements Tool {
    * @param srcf a file pattern specifying source files
    * @param dstf a destination local file/directory 
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void copy(String srcf, String dstf, Configuration conf) throws IOException {
     Path srcPath = new Path(srcf);
     FileSystem srcFs = srcPath.getFileSystem(getConf());
     Path dstPath = new Path(dstf);
     FileSystem dstFs = dstPath.getFileSystem(getConf());
-    Path [] srcs = srcFs.globPaths(srcPath);
+    Path [] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
     if (srcs.length > 1 && !dstFs.isDirectory(dstPath)) {
       throw new IOException("When copying multiple files, " 
                             + "destination should be a directory.");
@@ -1036,7 +1039,7 @@ public class FsShell extends Configured implements Tool {
    * @param srcf a file pattern specifying source files
    * @param recursive if need to delete subdirs
    * @throws IOException  
-   * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
+   * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
   void delete(String srcf, final boolean recursive) throws IOException {
     //rm behavior in Linux
@@ -1208,7 +1211,7 @@ public class FsShell extends Configured implements Tool {
     for (int i=startIndex; i<args.length; i++) {
       Path srcPath = new Path(args[i]);
       FileSystem srcFs = srcPath.getFileSystem(getConf());
-      Path[] paths = srcFs.globPaths(new Path(args[i]));
+      Path[] paths = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
       for(Path path : paths) {
         try {
           FileStatus file = srcFs.getFileStatus(path);
@@ -1878,7 +1881,8 @@ public class FsShell extends Configured implements Tool {
     final void globAndProcess(Path srcPattern, FileSystem srcFs
         ) throws IOException {
       List<IOException> exceptions = new ArrayList<IOException>();
-      for(Path p : srcFs.globPaths(srcPattern))
+      for(Path p : FileUtil.stat2Paths(srcFs.globStatus(srcPattern), 
+                                       srcPattern))
         try { process(p, srcFs); } 
         catch(IOException ioe) { exceptions.add(ioe); }
     

+ 7 - 6
src/java/org/apache/hadoop/mapred/FileInputFormat.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -154,12 +155,11 @@ public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
 
     for (Path p: dirs) {
       FileSystem fs = p.getFileSystem(job); 
-      Path[] matches =
-        FileUtil.stat2Paths(fs.listStatus(fs.globPaths(p, inputFilter), 
-                            inputFilter));
+      FileStatus[] matches = fs.listStatus(FileUtil.stat2Paths(fs.globStatus(p,
+                                     inputFilter)), inputFilter);
 
-      for (Path match: matches) {
-        result.add(fs.makeQualified(match));
+      for (FileStatus match: matches) {
+        result.add(fs.makeQualified(match.getPath()));
       }
     }
 
@@ -191,7 +191,8 @@ public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
           }
         }
       } else {
-        Path [] paths = fs.globPaths(p, hiddenFileFilter); 
+        Path [] paths = FileUtil.stat2Paths(fs.globStatus(p, 
+                                                          hiddenFileFilter), p);
         if (paths.length == 0) {
           result.add(
                      new IOException("Input Pattern " + p + " matches 0 files")); 

+ 3 - 1
src/test/org/apache/hadoop/fs/TestGlobPaths.java

@@ -337,7 +337,9 @@ public class TestGlobPaths extends TestCase {
         throw new IOException("Mkdirs failed to create " + path[i].toString());
       }
     }
-    Path[] globResults = fs.globPaths(new Path(pattern));
+    Path patternPath = new Path(pattern);
+    Path[] globResults = FileUtil.stat2Paths(fs.globStatus(patternPath),
+                                             patternPath);
     for(int i=0; i<globResults.length; i++) {
       globResults[i] = globResults[i].makeQualified(fs);
     }