
HDFS-15302. Backport HDFS-15286 to branch-2.x. Contributed by hemanthboyina.

(cherry picked from commit b22fb903508bacfdf398ca866b6c1924367ad177)
Akira Ajisaka 5 years ago
parent
commit
058e57fb41

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirConcatOp.java

@@ -151,7 +151,7 @@ class FSDirConcatOp {
             + " is referred by some other reference in some snapshot.");
       }
      // source file cannot be the same as the target file
-      if (srcINode == targetINode) {
+      if (srcINode.equals(targetINode)) {
         throw new HadoopIllegalArgumentException("concat: the src file " + src
             + " is the same with the target file " + targetIIP.getPath());
       }
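For context on the one-line fix above: after a rename under a snapshot, path resolution can hand back a reference node wrapping the original inode rather than the inode object itself, so an identity check (==) no longer detects that source and target are the same file, while an id-based equals() still does. A minimal sketch with hypothetical Inode/InodeReference classes (not the actual HDFS ones) illustrating the distinction:

// Minimal sketch with hypothetical classes; the real INode/INodeReference in
// HDFS are more involved, but the equality behaviour relied on is the same:
// equals() compares inode ids, so a reference node and the inode it wraps
// match, while == compares object identity and does not.
class Inode {
  private final long id;
  Inode(long id) { this.id = id; }
  long getId() { return id; }

  @Override
  public boolean equals(Object o) {
    return o instanceof Inode && ((Inode) o).getId() == getId();
  }

  @Override
  public int hashCode() { return Long.hashCode(getId()); }
}

// A reference node reporting the id of the inode it wraps, e.g. after a
// rename while a snapshot still refers to the original path.
class InodeReference extends Inode {
  InodeReference(Inode referred) { super(referred.getId()); }
}

public class IdentityVsEquals {
  public static void main(String[] args) {
    Inode file = new Inode(16386L);
    Inode viaReference = new InodeReference(file);
    System.out.println(file == viaReference);      // false: two distinct objects
    System.out.println(file.equals(viaReference)); // true: same underlying inode id
  }
}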

+ 40 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java

@@ -524,4 +524,44 @@ public class TestHDFSConcat {
       GenericTestUtils.assertExceptionContains(errMsg, e);
     }
   }
+
+  /**
+   * Test concat where the source and target are the same file backed by an inode reference.
+   */
+  @Test
+  public void testConcatOnSameFile() throws Exception {
+    String dir = "/dir1";
+    Path trgDir = new Path(dir);
+    dfs.mkdirs(new Path(dir));
+
+    // create a source file
+    String dir2 = "/dir2";
+    Path srcDir = new Path(dir2);
+    dfs.mkdirs(srcDir);
+    dfs.allowSnapshot(srcDir);
+    Path src = new Path(srcDir, "file1");
+    DFSTestUtil.createFile(dfs, src, 512, (short) 2, 0);
+
+    // make the file an inode reference (rename under a snapshot), then remove the snapshot
+    dfs.createSnapshot(srcDir, "s1");
+    dfs.rename(src, trgDir);
+    dfs.deleteSnapshot(srcDir, "s1");
+    Path[] srcs = new Path[1];
+    srcs[0] = new Path(dir, "file1");
+
+    // perform concat
+    try {
+      dfs.concat(srcs[0], srcs);
+    } catch (RemoteException e) {
+      GenericTestUtils.assertExceptionContains(
+          "concat: the src file /dir1/file1 is the same with the target", e);
+    }
+
+    // the file should still exist and remain readable
+    byte[] buff = new byte[1080];
+    FSDataInputStream stream = dfs.open(srcs[0]);
+    stream.readFully(0, buff, 0, 512);
+
+    assertEquals(1, dfs.getContentSummary(new Path(dir)).getFileCount());
+  }
 }
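A side note on the test above: because there is no fail() after dfs.concat() inside the try block, the test would still pass if concat unexpectedly succeeded, relying on the subsequent read and file-count checks to catch a regression. If a stricter assertion were preferred, and assuming org.apache.hadoop.test.LambdaTestUtils is available on this branch, a sketch along these lines would fail whenever the expected exception is not raised:

// Sketch only; assumes LambdaTestUtils (hadoop-common test jar) and a
// java.util.concurrent.Callable import. intercept() fails the test when the
// callable returns normally, and otherwise checks the message substring.
LambdaTestUtils.intercept(RemoteException.class,
    "concat: the src file /dir1/file1 is the same with the target",
    new Callable<Object>() {
      @Override
      public Object call() throws Exception {
        Path target = new Path("/dir1/file1");
        dfs.concat(target, new Path[] {target});
        return null;
      }
    });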