
HADOOP-4277. Checksum verification was mistakenly disabled for
LocalFileSystem. (Raghu Angadi)


git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.18@699493 13f79535-47bb-0310-9956-ffa450edef68

Raghu Angadi, 16 years ago
Parent commit: b641b5f0de

+ 7 - 0
CHANGES.txt

@@ -828,6 +828,13 @@ Release 0.18.0 - 2008-08-19
     HADOOP-3947. Fix a problem in tasktracker reinitialization. 
     (Amareshwari Sriramadasu via ddas)
 
+Release 0.17.3 - Unreleased
+
+  BUG FIXES
+
+    HADOOP-4277. Checksum verification was mistakenly disabled for
+    LocalFileSystem. (Raghu Angadi)
+
 Release 0.17.2 - 2008-08-11
 
   BUG FIXES

+ 1 - 1
src/core/org/apache/hadoop/fs/FSInputChecker.java

@@ -37,7 +37,7 @@ abstract public class FSInputChecker extends FSInputStream {
   /** The file name from which data is read from */
   protected Path file;
   private Checksum sum;
-  private boolean verifyChecksum;
+  private boolean verifyChecksum = true;
   private byte[] buf;
   private byte[] checksum;
   private int pos;

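The fix itself is this one-line default. A Java boolean field is false unless assigned, so an FSInputChecker built through a code path that never calls the explicit setter would silently skip verification, which is presumably how LocalFileSystem ended up reading corrupt data without complaint; initialising the field to true makes verification the default rather than something each caller must remember to switch on. The standalone sketch below is not part of the commit (the class name, paths and the small write loop are made up) and only illustrates the behaviour this change restores on LocalFileSystem: flipping bytes in the data file behind the file system's back should now make the read fail with a ChecksumException.

  import java.io.File;
  import java.io.InputStream;
  import java.io.RandomAccessFile;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.ChecksumException;
  import org.apache.hadoop.fs.FSDataOutputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.LocalFileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IOUtils;

  public class LocalChecksumCheck {
    public static void main(String[] args) throws Exception {
      LocalFileSystem fs = FileSystem.getLocal(new Configuration());
      Path file = new Path("/tmp/checksum-check.dat");   // illustrative path

      // write some known bytes; LocalFileSystem also writes a hidden .crc sibling
      FSDataOutputStream out = fs.create(file);
      for (int i = 0; i < 4096; i++) {
        out.write(i & 0xff);
      }
      out.close();

      // corrupt the data file directly, bypassing the file system
      RandomAccessFile raf = new RandomAccessFile(new File(file.toString()), "rw");
      raf.seek(1024);
      raf.write(0xde);   // original byte at this offset is 0, so this is a real change
      raf.close();

      // with verification on by default, the read must fail
      InputStream in = fs.open(file);
      try {
        byte[] buf = new byte[4096];
        IOUtils.readFully(in, buf, 0, buf.length);
        System.out.println("BUG: corruption went undetected");
      } catch (ChecksumException e) {
        System.out.println("corruption detected: " + e.getMessage());
      } finally {
        in.close();
      }
    }
  }
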
+ 3 - 0
src/test/org/apache/hadoop/dfs/TestDFSShell.java

@@ -195,6 +195,9 @@ public class TestDFSShell extends TestCase {
     final DistributedFileSystem dfs = (DistributedFileSystem)fs;
 
     try {
+      // remove left over crc files:
+      new File(TEST_ROOT_DIR, ".f1.crc").delete();
+      new File(TEST_ROOT_DIR, ".f2.crc").delete();    
       final File f1 = createLocalFile(new File(TEST_ROOT_DIR, "f1"));
       final File f2 = createLocalFile(new File(TEST_ROOT_DIR, "f2"));
   

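The two delete() calls are needed because LocalFileSystem, being a ChecksumFileSystem, keeps each file's CRCs in a hidden sibling named ".<name>.crc". While verification was effectively off, stale .f1.crc and .f2.crc files left behind by earlier test runs were harmless; with this fix they would be checked against the freshly created f1 and f2 and fail the read. A small sketch of the naming convention, assuming ChecksumFileSystem's public getChecksumFile(Path) helper is available as in contemporary Hadoop releases:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.LocalFileSystem;
  import org.apache.hadoop.fs.Path;

  public class CrcNaming {
    public static void main(String[] args) throws Exception {
      LocalFileSystem fs = FileSystem.getLocal(new Configuration());
      // prints the hidden checksum sibling, e.g. /some/dir/.f1.crc
      System.out.println(fs.getChecksumFile(new Path("/some/dir/f1")));
    }
  }
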
+ 63 - 1
src/test/org/apache/hadoop/dfs/TestFSInputChecker.java

@@ -21,11 +21,14 @@ import junit.framework.TestCase;
 import java.io.*;
 import java.util.Random;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.ChecksumFileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 
 /**
  * This class tests if FSInputChecker works correctly.
@@ -40,6 +43,7 @@ public class TestFSInputChecker extends TestCase {
   byte[] expected = new byte[FILE_SIZE];
   byte[] actual;
   FSDataInputStream stm;
+  Random rand = new Random(seed);
 
   /* create a file */
   private void writeFile(FileSystem fileSys, Path name) throws IOException {
@@ -216,13 +220,70 @@ public class TestFSInputChecker extends TestCase {
     cleanupFile(fileSys, file);
   }
   
+  private void testFileCorruption(LocalFileSystem fileSys) throws IOException {
+    // create a file and verify that checksum corruption results in 
+    // a checksum exception on LocalFS
+    
+    String dir = System.getProperty("test.build.data", ".");
+    Path file = new Path(dir + "/corruption-test.dat");
+    Path crcFile = new Path(dir + "/.corruption-test.dat.crc");
+    
+    writeFile(fileSys, file);
+    
+    int fileLen = (int)fileSys.getFileStatus(file).getLen();
+    
+    byte [] buf = new byte[fileLen];
+
+    InputStream in = fileSys.open(file);
+    IOUtils.readFully(in, buf, 0, buf.length);
+    in.close();
+    
+    // check .crc corruption
+    checkFileCorruption(fileSys, file, crcFile);
+    fileSys.delete(file, true);
+    
+    writeFile(fileSys, file);
+    
+    // check data corruption
+    checkFileCorruption(fileSys, file, file);
+    
+    fileSys.delete(file, true);
+  }
+  
+  private void checkFileCorruption(LocalFileSystem fileSys, Path file, 
+                                   Path fileToCorrupt) throws IOException {
+    
+    // corrupt the file 
+    RandomAccessFile out = 
+      new RandomAccessFile(new File(fileToCorrupt.toString()), "rw");
+    
+    byte[] buf = new byte[(int)fileSys.getFileStatus(file).getLen()];    
+    int corruptFileLen = (int)fileSys.getFileStatus(fileToCorrupt).getLen();
+    assertTrue(buf.length >= corruptFileLen);
+    
+    rand.nextBytes(buf);
+    out.seek(corruptFileLen/2);
+    out.write(buf, 0, corruptFileLen/4);
+    out.close();
+
+    boolean gotException = false;
+    
+    InputStream in = fileSys.open(file);
+    try {
+      IOUtils.readFully(in, buf, 0, buf.length);
+    } catch (ChecksumException e) {
+      gotException = true;
+    }
+    assertTrue(gotException);
+    in.close();    
+  }
+  
   public void testFSInputChecker() throws Exception {
     Configuration conf = new Configuration();
     conf.setLong("dfs.block.size", BLOCK_SIZE);
     conf.setInt("io.bytes.per.checksum", BYTES_PER_SUM);
     conf.set("fs.hdfs.impl",
              "org.apache.hadoop.dfs.ChecksumDistributedFileSystem");
-    Random rand = new Random(seed);
     rand.nextBytes(expected);
 
     // test DFS
@@ -242,6 +303,7 @@ public class TestFSInputChecker extends TestCase {
     try {
       testChecker(fileSys, true);
       testChecker(fileSys, false);
+      testFileCorruption((LocalFileSystem)fileSys);
     }finally {
       fileSys.close();
     }