
HDFS-2505. Add a test to verify getFileChecksum(..) with ViewFS. Contributed by Ravi Prakash

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1310522 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 13 years ago
parent
commit 58e920d1a0
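
For context, the check that the new test adds boils down to the sketch below. This is a minimal illustration, not part of the commit: the mount point "/view", the paths, and the compareChecksums helper are hypothetical, while ConfigUtil.addLink, FsConstants.VIEWFS_URI, and FileSystem.getFileChecksum are the same APIs used in the diff that follows.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ConfigUtil;

public class ViewFsChecksumSketch {
  // hdfsUri points at a running HDFS namenode, e.g. "hdfs://localhost:8020";
  // "/data" and "/data/file" are illustrative and assumed to exist.
  static void compareChecksums(String hdfsUri) throws Exception {
    Configuration conf = new Configuration();
    // Mount "/view" in the ViewFS namespace onto the HDFS directory "/data".
    ConfigUtil.addLink(conf, "/view", new URI(hdfsUri + "/data"));

    FileSystem hdfs = FileSystem.get(new URI(hdfsUri), conf);
    FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);

    // ViewFS only resolves the mount point; the checksum itself is computed
    // by HDFS, so both views of the same file should return equal values.
    FileChecksum viaHdfs = hdfs.getFileChecksum(new Path("/data/file"));
    FileChecksum viaViewFs = viewFs.getFileChecksum(new Path("/view/file"));
    System.out.println("checksums equal: " + viaHdfs.equals(viaViewFs));
  }
}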

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -766,6 +766,9 @@ Release 0.23.3 - UNRELEASED
 
   IMPROVEMENTS
 
+    HDFS-2505. Add a test to verify getFileChecksum(..) with ViewFS.  (Ravi
+    Prakash via szetszwo)
+
   OPTIMIZATIONS
 
   BUG FIXES

+ 35 - 8
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsFileStatusHdfs.java

@@ -30,6 +30,7 @@ import java.net.URISyntaxException;
 import javax.security.auth.login.LoginException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
@@ -48,13 +49,13 @@ import static org.junit.Assert.*;
 public class TestViewFsFileStatusHdfs {
   
   static final String testfilename = "/tmp/testFileStatusSerialziation";
+  static final String someFile = "/hdfstmp/someFileForTestGetFileChecksum";
 
-  
-  
   private static MiniDFSCluster cluster;
   private static Path defaultWorkingDirectory;
   private static Configuration CONF = new Configuration();
   private static FileSystem fHdfs;
+  private static FileSystem vfs;
   
   @BeforeClass
   public static void clusterSetupAtBegining() throws IOException,
@@ -65,18 +66,19 @@ public class TestViewFsFileStatusHdfs {
     defaultWorkingDirectory = fHdfs.makeQualified( new Path("/user/" + 
         UserGroupInformation.getCurrentUser().getShortUserName()));
     fHdfs.mkdirs(defaultWorkingDirectory);
+
+    // Setup the ViewFS to be used for all tests.
+    Configuration conf = ViewFileSystemTestSetup.createConfig();
+    ConfigUtil.addLink(conf, "/vfstmp", new URI(fHdfs.getUri() + "/hdfstmp"));
+    ConfigUtil.addLink(conf, "/tmp", new URI(fHdfs.getUri() + "/tmp"));
+    vfs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
+    assertEquals(ViewFileSystem.class, vfs.getClass());
   }
 
   @Test
   public void testFileStatusSerialziation()
       throws IOException, URISyntaxException {
-
    long len = FileSystemTestHelper.createFile(fHdfs, testfilename);
-
-    Configuration conf = ViewFileSystemTestSetup.createConfig();
-    ConfigUtil.addLink(conf, "/tmp", new URI(fHdfs.getUri().toString() + "/tmp"));
-    FileSystem vfs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
-    assertEquals(ViewFileSystem.class, vfs.getClass());
     FileStatus stat = vfs.getFileStatus(new Path(testfilename));
     assertEquals(len, stat.getLen());
     // check serialization/deserialization
@@ -89,9 +91,34 @@ public class TestViewFsFileStatusHdfs {
     assertEquals(len, deSer.getLen());
   }
 
+  @Test
+  public void testGetFileChecksum() throws IOException, URISyntaxException {
+    // Create two different files in HDFS
+    FileSystemTestHelper.createFile(fHdfs, someFile);
+    FileSystemTestHelper.createFile(fHdfs, FileSystemTestHelper
+      .getTestRootPath(fHdfs, someFile + "other"), 1, 512);
+    // Get checksum through ViewFS
+    FileChecksum viewFSCheckSum = vfs.getFileChecksum(
+      new Path("/vfstmp/someFileForTestGetFileChecksum"));
+    // Get checksum through HDFS. 
+    FileChecksum hdfsCheckSum = fHdfs.getFileChecksum(
+      new Path(someFile));
+    // Get checksum of different file in HDFS
+    FileChecksum otherHdfsFileCheckSum = fHdfs.getFileChecksum(
+      new Path(someFile+"other"));
+    // Checksums of the same file (obtained through HDFS and through ViewFS) should be the same.
+    assertEquals("HDFS and ViewFS checksums were not the same", viewFSCheckSum,
+      hdfsCheckSum);
+    // Checksum of different files should be different.
+    assertFalse("Some other HDFS file which should not have had the same " +
+      "checksum as viewFS did!", viewFSCheckSum.equals(otherHdfsFileCheckSum));
+  }
+
   @AfterClass
   public static void cleanup() throws IOException {
     fHdfs.delete(new Path(testfilename), true);
+    fHdfs.delete(new Path(someFile), true);
+    fHdfs.delete(new Path(someFile + "other"), true);
   }
 
 }