Browse source code

HADOOP-16878. FileUtil.copy() to throw IOException if the source and destination are the same (#2383)

Contributed by Gabor Bota.
Ayush Saxena 4 years ago
parent
commit
54c40cbf49

+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -398,6 +398,12 @@ public class FileUtil {
                              Configuration conf) throws IOException {
     Path src = srcStatus.getPath();
     dst = checkDest(src.getName(), dstFS, dst, overwrite);
+
+    if (srcFS.makeQualified(src).equals(dstFS.makeQualified(dst))) {
+      throw new PathOperationException("Source (" + src + ") and destination " +
+          "(" + dst + ") are equal in the copy command.");
+    }
+
     if (srcStatus.isDirectory()) {
       checkDependencies(srcFS, src, dstFS, dst);
       if (!dstFS.mkdirs(dst)) {

+ 15 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java

@@ -34,6 +34,7 @@ import java.io.PrintStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -175,7 +176,20 @@ public class TestFsShellCopy {
     checkPut(dirPath, targetDir, true);
   }
 
-  
+  @Test
+  public void testCopyBetweenFsEqualPath() throws Exception {
+    Path testRoot = new Path(testRootDir, "testPutFile");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path filePath = new Path(testRoot, "sameSourceTarget");
+    lfs.create(filePath).close();
+    final FileStatus status = lfs.getFileStatus(filePath);
+    LambdaTestUtils.intercept(PathOperationException.class, () ->
+        FileUtil.copy(lfs, status, lfs, filePath, false, true, conf)
+    );
+  }
+
   private void checkPut(Path srcPath, Path targetDir, boolean useWindowsPath)
   throws Exception {
     lfs.delete(targetDir, true);

+ 16 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java

@@ -65,6 +65,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileSystem.Statistics.StatisticsData;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
@@ -75,6 +76,7 @@ import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.fs.PathOperationException;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
 import org.apache.hadoop.fs.StorageType;
@@ -2090,4 +2092,18 @@ public class TestDistributedFileSystem {
       assertFalse(result.isSupported());
     }
   }
+
+  @Test
+  public void testCopyBetweenFsEqualPath() throws Exception {
+    Configuration conf = getTestConfiguration();
+    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
+      cluster.waitActive();
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      Path filePath = new Path("/dir/file");
+      dfs.create(filePath).close();
+      FileStatus fstatus = dfs.getFileStatus(filePath);
+      LambdaTestUtils.intercept(PathOperationException.class,
+          () -> FileUtil.copy(dfs, fstatus, dfs, filePath, false, true, conf));
+    }
+  }
 }