
HADOOP-2567. Add FileSystem#getHomeDirectory().

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@611333 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 17 years ago
parent
commit
982d8f38a8

+ 7 - 0
CHANGES.txt

@@ -41,6 +41,13 @@ Trunk (unreleased changes)
     Datanode Protocol version changed from 10 to 11.  
     (Sanjay Radia via dhruba)
     
+    HADOOP-2567.  Add FileSystem#getHomeDirectory(), which returns the
+    user's home directory in a FileSystem as a fully-qualified path.
+    FileSystem#getWorkingDirectory() is also changed to return a
+    fully-qualified path, which can break applications that attempt
+    to, e.g., pass LocalFileSystem#getWorkingDir().toString() directly
+    to java.io methods that accept file names. (cutting)
+
   NEW FEATURES
 
     HADOOP-1857.  Ability to run a script when a task fails to capture stack

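The incompatibility called out in the CHANGES.txt entry above is easiest to see in code. The sketch below is not part of the commit; it assumes a Hadoop client classpath, and the directory named in the comments is illustrative. It shows why a fully-qualified Path can no longer be handed straight to java.io via toString().

import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WorkingDirExample {
  public static void main(String[] args) throws Exception {
    FileSystem local = FileSystem.getLocal(new Configuration());
    Path wd = local.getWorkingDirectory();

    // After this change, toString() yields a fully-qualified URI such as
    // "file:/home/alice/project", which java.io.File treats as a relative
    // file name rather than the directory it points at.
    File broken = new File(wd.toString());

    // Extracting only the path component keeps java.io interop working.
    File fixed = new File(wd.toUri().getPath());

    System.out.println(broken.exists() + " vs " + fixed.exists());
  }
}
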
+ 4 - 4
src/java/org/apache/hadoop/dfs/DistributedFileSystem.java

@@ -35,8 +35,7 @@ import org.apache.hadoop.util.*;
  *
  *****************************************************************/
 public class DistributedFileSystem extends FileSystem {
-  private Path workingDir =
-    new Path("/user", System.getProperty("user.name")); 
+  private Path workingDir;
   private URI uri;
 
   DFSClient dfs;
@@ -64,6 +63,7 @@ public class DistributedFileSystem extends FileSystem {
     int port = uri.getPort();
     this.dfs = new DFSClient(new InetSocketAddress(host, port), conf);
     this.uri = URI.create("hdfs://"+host+":"+port);
+    this.workingDir = getHomeDirectory();
   }
 
   public Path getWorkingDirectory() {
@@ -87,8 +87,8 @@ public class DistributedFileSystem extends FileSystem {
   }
 
   public void setWorkingDirectory(Path dir) {
-    Path result = makeAbsolute(dir);
-    if (!FSNamesystem.isValidName(result.toString())) {
+    String result = makeAbsolute(dir).toUri().getPath();
+    if (!FSNamesystem.isValidName(result)) {
       throw new IllegalArgumentException("Invalid DFS directory name " + 
                                          result);
     }

+ 9 - 0
src/java/org/apache/hadoop/fs/FileSystem.java

@@ -863,6 +863,15 @@ public abstract class FileSystem extends Configured {
     }
   }
     
+  /** Return the current user's home directory in this filesystem.
+   * The default implementation returns "/user/$USER/".
+   */
+  public Path getHomeDirectory() {
+    return new Path("/user/"+System.getProperty("user.name"))
+      .makeQualified(this);
+  }
+
+
   /**
    * Set the current working directory for the given file system. All relative
    * paths will be resolved relative to it.

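As a usage sketch (not part of the commit): the default implementation above resolves "/user/$USER" against the filesystem's own scheme and authority via makeQualified, so DFS clients get back a fully-qualified home path. The namenode host, port, and user name below are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HomeDirExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());

    // Fully qualified, e.g. hdfs://namenode:9000/user/alice when the
    // default filesystem is DFS.
    Path home = fs.getHomeDirectory();
    System.out.println(home);

    // A convenient anchor for per-user data, independent of whatever the
    // current working directory happens to be.
    Path scratch = new Path(home, "scratch/output");
    System.out.println(fs.exists(scratch));
  }
}
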
+ 5 - 0
src/java/org/apache/hadoop/fs/FilterFileSystem.java

@@ -152,6 +152,11 @@ public class FilterFileSystem extends FileSystem {
     return fs.listStatus(f);
   }
   
+  public Path getHomeDirectory() {
+    return fs.getHomeDirectory();
+  }
+
+
   /**
    * Set the current working directory for the given file system. All relative
    * paths will be resolved relative to it.

+ 9 - 3
src/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -36,8 +36,7 @@ import org.apache.hadoop.util.Shell;
  *****************************************************************/
 public class RawLocalFileSystem extends FileSystem {
   static final URI NAME = URI.create("file:///");
-  private Path workingDir =
-    new Path(System.getProperty("user.dir"));
+  private Path workingDir;
   TreeMap<File, FileInputStream> sharedLockDataSet =
     new TreeMap<File, FileInputStream>();
   TreeMap<File, FileOutputStream> nonsharedLockDataSet =
@@ -46,7 +45,9 @@ public class RawLocalFileSystem extends FileSystem {
   // by default use copy/delete instead of rename
   boolean useCopyForRename = true;
   
-  public RawLocalFileSystem() {}
+  public RawLocalFileSystem() {
+    workingDir = new Path(System.getProperty("user.dir")).makeQualified(this);
+  }
   
   /** Convert a path to a File. */
   public File pathToFile(Path path) {
@@ -264,6 +265,11 @@ public class RawLocalFileSystem extends FileSystem {
     return b;
   }
   
+  @Override
+  public Path getHomeDirectory() {
+    return new Path(System.getProperty("user.home")).makeQualified(this);
+  }
+
   /**
    * Set the working directory to the given directory.
    */

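For the local filesystem, the override above answers differently from the "/user/$USER" default. A small sketch, not part of the commit, with an illustrative path in the comments:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class LocalHomeDirExample {
  public static void main(String[] args) throws Exception {
    FileSystem local = FileSystem.getLocal(new Configuration());

    // Based on the user.home system property and qualified with the file:
    // scheme, e.g. file:/home/alice, rather than the generic "/user/$USER"
    // default in FileSystem.
    System.out.println(local.getHomeDirectory());

    // The working directory, by contrast, starts at user.dir (the JVM's
    // current directory), as set in the constructor above.
    System.out.println(local.getWorkingDirectory());
  }
}
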
+ 1 - 1
src/test/org/apache/hadoop/dfs/TestDecommission.java

@@ -270,7 +270,7 @@ public class TestDecommission extends TestCase {
     assertTrue(localFileSys.mkdirs(dir));
     hostsFile = new Path(dir, "hosts");
     excludeFile = new Path(dir, "exclude");
-    conf.set("dfs.hosts.exclude", excludeFile.toString());
+    conf.set("dfs.hosts.exclude", excludeFile.toUri().getPath());
     writeConfigFile(localFileSys, excludeFile, null);
 
     MiniDFSCluster cluster = new MiniDFSCluster(conf, numDatanodes, true, null);