
HADOOP-3202. Use recursive delete rather than FileUtil.fullyDelete.
(Amareshwari Sriramadasu via omalley)


git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@685322 13f79535-47bb-0310-9956-ffa450edef68

Owen O'Malley 17 years ago
parent
commit f7e6fc15a4
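
The migration is the same at every call site below: the now-deprecated FileUtil.fullyDelete(fs, path) helper is replaced by a single FileSystem#delete(path, true) call. A minimal sketch of the pattern, assuming a local scratch directory and a default Configuration (both illustrative, not taken from this commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class FullyDeleteMigration {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path dir = new Path("/tmp/scratch");      // hypothetical path
    FileSystem fs = dir.getFileSystem(conf);

    // Old style, deprecated by this commit: walked the tree client-side,
    // issuing a delete per entry.
    FileUtil.fullyDelete(fs, dir);

    // New style: one recursive delete; the FileSystem implementation
    // removes the directory and everything beneath it.
    fs.delete(dir, true);
  }
}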

+ 3 - 0
CHANGES.txt

@@ -163,6 +163,9 @@ Trunk (unreleased changes)
     HADOOP-9. Use roulette scheduling for temporary space when the size
     is not known. (Ari Rabkin via omalley)
 
+    HADOOP-3202. Use recursive delete rather than FileUtil.fullyDelete.
+    (Amareshwari Sriramadasu via omalley)
+
   OPTIMIZATIONS
 
     HADOOP-3556. Removed lock contention in MD5Hash by changing the 

+ 1 - 5
src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java

@@ -20,16 +20,12 @@ package org.apache.hadoop.streaming;
 
 import java.io.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
-import org.apache.hadoop.util.*;
 
 import junit.framework.TestCase;
 
@@ -88,7 +84,7 @@ public class TestUlimit extends TestCase {
       writeInputFile(fs, inputPath);
       map = StreamUtil.makeJavaCommand(UlimitApp.class, new String[]{});  
       runProgram(SET_MEMORY_LIMIT);
-      FileUtil.fullyDelete(fs, outputPath);
+      fs.delete(outputPath, true);
       assertFalse("output not cleaned up", fs.exists(outputPath));
       mr.waitUntilIdle();
     } catch(IOException e) {

+ 2 - 10
src/core/org/apache/hadoop/fs/FileUtil.java

@@ -103,19 +103,11 @@ public class FileUtil {
    * @param fs {@link FileSystem} on which the path is present
    * @param dir directory to recursively delete 
    * @throws IOException
+   * @deprecated Use {@link FileSystem#delete(Path, boolean)}
    */
+  @Deprecated
   public static void fullyDelete(FileSystem fs, Path dir) 
   throws IOException {
-    FileStatus[] paths = fs.listStatus(dir);
-    if (paths != null) {
-      for (FileStatus p : paths) {
-        if (!p.isDir())  {
-          fs.delete(p.getPath(), true);
-        } else {
-          fullyDelete(fs, p.getPath());
-        }
-      }
-    }
     fs.delete(dir, true);
   }
 

+ 1 - 2
src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java

@@ -25,7 +25,6 @@ import java.util.HashMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -217,7 +216,7 @@ class LocalJobRunner implements JobSubmissionProtocol {
         try {
           if (outputFs != null) {
             if (outputFs.exists(tmpDir)) {
-              FileUtil.fullyDelete(outputFs, tmpDir);
+              outputFs.delete(tmpDir, true);
             }
           }
         } catch (IOException e) {

+ 4 - 4
src/test/org/apache/hadoop/mapred/pipes/TestPipes.java

@@ -46,7 +46,7 @@ public class TestPipes extends TestCase {
     LogFactory.getLog(TestPipes.class.getName());
 
   static void cleanup(FileSystem fs, Path p) throws IOException {
-    FileUtil.fullyDelete(fs, p);
+    fs.delete(p, true);
     assertFalse("output not cleaned up", fs.exists(p));
   }
 
@@ -142,7 +142,7 @@ public class TestPipes extends TestCase {
     job.setNumReduceTasks(numReduces);
     {
       FileSystem fs = dfs.getFileSystem();
-      FileUtil.fullyDelete(fs, wordExec.getParent());
+      fs.delete(wordExec.getParent(), true);
       fs.copyFromLocalFile(program, wordExec);                                         
       Submitter.setExecutable(job, fs.makeQualified(wordExec).toString());
       Submitter.setIsJavaRecordReader(job, true);
@@ -187,7 +187,7 @@ public class TestPipes extends TestCase {
     Path jobXml = new Path(testDir, "job.xml");
     {
       FileSystem fs = dfs.getFileSystem();
-      FileUtil.fullyDelete(fs, wordExec.getParent());
+      fs.delete(wordExec.getParent(), true);
       fs.copyFromLocalFile(program, wordExec);
     }
     DataOutputStream out = local.create(new Path(inDir, "part0"));
@@ -197,7 +197,7 @@ public class TestPipes extends TestCase {
     out = local.create(new Path(inDir, "part1"));
     out.writeBytes("all silly things drink java\n");
     out.close();
-    FileUtil.fullyDelete(local, outDir);
+    local.delete(outDir, true);
     local.mkdirs(outDir);
     out = local.create(jobXml);
     job.write(out);

+ 1 - 1
src/tools/org/apache/hadoop/tools/DistCp.java

@@ -836,7 +836,7 @@ public class DistCp implements Tool {
   static void fullyDelete(String dir, Configuration conf) throws IOException {
     if (dir != null) {
       Path tmp = new Path(dir);
-      FileUtil.fullyDelete(tmp.getFileSystem(conf), tmp);
+      tmp.getFileSystem(conf).delete(tmp, true);
     }
   }