
svn merge -c 1373683 from trunk for HADOOP-8700. Use enum to define the checksum constants in DataChecksum.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1375377 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 12 years ago
parent
commit
6029f5eb07
14 changed files with 93 additions and 105 deletions
  1. + 3 - 0    hadoop-common-project/hadoop-common/CHANGES.txt
  2. + 56 - 52  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
  3. + 10 - 11  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java
  4. + 1 - 1    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
  5. + 11 - 12  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
  6. + 2 - 18   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
  7. + 1 - 2    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
  8. + 1 - 1    hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
  9. + 2 - 2    hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
  10. + 1 - 1   hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
  11. + 1 - 1   hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java
  12. + 2 - 2   hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java
  13. + 1 - 1   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java
  14. + 1 - 1   hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileOutputStream.java

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -30,6 +30,9 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8242. AbstractDelegationTokenIdentifier: add getter methods
     for owner and realuser. (Colin Patrick McCabe via eli)
 
+    HADOOP-8700.  Use enum to define the checksum constants in DataChecksum.
+    (szetszwo)
+
   OPTIMIZATIONS
 
   BUG FIXES

+ 56 - 52
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java

@@ -43,31 +43,44 @@ public class DataChecksum implements Checksum {
   public static final int CHECKSUM_NULL    = 0;
   public static final int CHECKSUM_CRC32   = 1;
   public static final int CHECKSUM_CRC32C  = 2;
-  
-  private static String[] NAMES = new String[] {
-    "NULL", "CRC32", "CRC32C"
-  };
-  
-  private static final int CHECKSUM_NULL_SIZE  = 0;
-  private static final int CHECKSUM_CRC32_SIZE = 4;
-  private static final int CHECKSUM_CRC32C_SIZE = 4;
-  
-  
-  public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) {
+ 
+  /** The checksum types */
+  public static enum Type {
+    NULL  (CHECKSUM_NULL, 0),
+    CRC32 (CHECKSUM_CRC32, 4),
+    CRC32C(CHECKSUM_CRC32C, 4);
+
+    public final int id;
+    public final int size;
+    
+    private Type(int id, int size) {
+      this.id = id;
+      this.size = size;
+    }
+
+    /** @return the type corresponding to the id. */
+    public static Type valueOf(int id) {
+      if (id < 0 || id >= values().length) {
+        throw new IllegalArgumentException("id=" + id
+            + " out of range [0, " + values().length + ")");
+      }
+      return values()[id];
+    }
+  }
+
+
+  public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) {
     if ( bytesPerChecksum <= 0 ) {
       return null;
     }
     
     switch ( type ) {
-    case CHECKSUM_NULL :
-      return new DataChecksum( CHECKSUM_NULL, new ChecksumNull(), 
-                               CHECKSUM_NULL_SIZE, bytesPerChecksum );
-    case CHECKSUM_CRC32 :
-      return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(), 
-                               CHECKSUM_CRC32_SIZE, bytesPerChecksum );
-    case CHECKSUM_CRC32C:
-      return new DataChecksum( CHECKSUM_CRC32C, new PureJavaCrc32C(),
-                               CHECKSUM_CRC32C_SIZE, bytesPerChecksum);
+    case NULL :
+      return new DataChecksum(type, new ChecksumNull(), bytesPerChecksum );
+    case CRC32 :
+      return new DataChecksum(type, new PureJavaCrc32(), bytesPerChecksum );
+    case CRC32C:
+      return new DataChecksum(type, new PureJavaCrc32C(), bytesPerChecksum);
     default:
       return null;  
     }
@@ -87,7 +100,7 @@ public class DataChecksum implements Checksum {
                            ( (bytes[offset+2] & 0xff) << 16 ) |
                            ( (bytes[offset+3] & 0xff) << 8 )  |
                            ( (bytes[offset+4] & 0xff) );
-    return newDataChecksum( bytes[0], bytesPerChecksum );
+    return newDataChecksum( Type.valueOf(bytes[0]), bytesPerChecksum );
   }
   
   /**
@@ -98,7 +111,7 @@ public class DataChecksum implements Checksum {
                                  throws IOException {
     int type = in.readByte();
     int bpc = in.readInt();
-    DataChecksum summer = newDataChecksum( type, bpc );
+    DataChecksum summer = newDataChecksum(Type.valueOf(type), bpc );
     if ( summer == null ) {
       throw new IOException( "Could not create DataChecksum of type " +
                              type + " with bytesPerChecksum " + bpc );
@@ -111,13 +124,13 @@ public class DataChecksum implements Checksum {
    */
   public void writeHeader( DataOutputStream out ) 
                            throws IOException { 
-    out.writeByte( type );
+    out.writeByte( type.id );
     out.writeInt( bytesPerChecksum );
   }
 
   public byte[] getHeader() {
     byte[] header = new byte[DataChecksum.HEADER_LEN];
-    header[0] = (byte) (type & 0xff);
+    header[0] = (byte) (type.id & 0xff);
     // Writing in buffer just like DataOutput.WriteInt()
     header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
     header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
@@ -133,11 +146,11 @@ public class DataChecksum implements Checksum {
    */
    public int writeValue( DataOutputStream out, boolean reset )
                           throws IOException {
-     if ( size <= 0 ) {
+     if ( type.size <= 0 ) {
        return 0;
      }
 
-     if ( size == 4 ) {
+     if ( type.size == 4 ) {
        out.writeInt( (int) summer.getValue() );
      } else {
        throw new IOException( "Unknown Checksum " + type );
@@ -147,7 +160,7 @@ public class DataChecksum implements Checksum {
        reset();
      }
      
-     return size;
+     return type.size;
    }
    
    /**
@@ -157,11 +170,11 @@ public class DataChecksum implements Checksum {
     */
     public int writeValue( byte[] buf, int offset, boolean reset )
                            throws IOException {
-      if ( size <= 0 ) {
+      if ( type.size <= 0 ) {
         return 0;
       }
 
-      if ( size == 4 ) {
+      if ( type.size == 4 ) {
         int checksum = (int) summer.getValue();
         buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
         buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
@@ -175,7 +188,7 @@ public class DataChecksum implements Checksum {
         reset();
       }
       
-      return size;
+      return type.size;
     }
    
    /**
@@ -183,36 +196,33 @@ public class DataChecksum implements Checksum {
     * @return true if the checksum matches and false otherwise.
     */
    public boolean compare( byte buf[], int offset ) {
-     if ( size == 4 ) {
+     if ( type.size == 4 ) {
        int checksum = ( (buf[offset+0] & 0xff) << 24 ) | 
                       ( (buf[offset+1] & 0xff) << 16 ) |
                       ( (buf[offset+2] & 0xff) << 8 )  |
                       ( (buf[offset+3] & 0xff) );
        return checksum == (int) summer.getValue();
      }
-     return size == 0;
+     return type.size == 0;
    }
    
-  private final int type;
-  private final int size;
+  private final Type type;
   private final Checksum summer;
   private final int bytesPerChecksum;
   private int inSum = 0;
   
-  private DataChecksum( int checksumType, Checksum checksum,
-                        int sumSize, int chunkSize ) {
-    type = checksumType;
+  private DataChecksum( Type type, Checksum checksum, int chunkSize ) {
+    this.type = type;
     summer = checksum;
-    size = sumSize;
     bytesPerChecksum = chunkSize;
   }
   
   // Accessors
-  public int getChecksumType() {
+  public Type getChecksumType() {
     return type;
   }
   public int getChecksumSize() {
-    return size;
+    return type.size;
   }
   public int getBytesPerChecksum() {
     return bytesPerChecksum;
@@ -260,7 +270,7 @@ public class DataChecksum implements Checksum {
   public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
       String fileName, long basePos)
   throws ChecksumException {
-    if (size == 0) return;
+    if (type.size == 0) return;
     
     if (data.hasArray() && checksums.hasArray()) {
       verifyChunkedSums(
@@ -270,7 +280,7 @@ public class DataChecksum implements Checksum {
       return;
     }
     if (NativeCrc32.isAvailable()) {
-      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type, checksums, data,
+      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data,
           fileName, basePos);
       return;
     }
@@ -280,7 +290,7 @@ public class DataChecksum implements Checksum {
     checksums.mark();
     try {
       byte[] buf = new byte[bytesPerChecksum];
-      byte[] sum = new byte[size];
+      byte[] sum = new byte[type.size];
       while (data.remaining() > 0) {
         int n = Math.min(data.remaining(), bytesPerChecksum);
         checksums.get(sum);
@@ -351,7 +361,7 @@ public class DataChecksum implements Checksum {
    *                  buffer to put the checksums.
    */
   public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
-    if (size == 0) return;
+    if (type.size == 0) return;
     
     if (data.hasArray() && checksums.hasArray()) {
       calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
@@ -411,18 +421,12 @@ public class DataChecksum implements Checksum {
   
   @Override
   public int hashCode() {
-    return (this.type + 31) * this.bytesPerChecksum;
+    return (this.type.id + 31) * this.bytesPerChecksum;
   }
   
   @Override
   public String toString() {
-    String strType;
-    if (type < NAMES.length && type > 0) {
-      strType = NAMES[type];
-    } else {
-      strType = String.valueOf(type);
-    }
-    return "DataChecksum(type=" + strType +
+    return "DataChecksum(type=" + type +
       ", chunkSize=" + bytesPerChecksum + ")";
   }
   

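The heart of the change is above: the parallel int constants, the NAMES array, and the per-type size constants collapse into a single Type enum that carries both the wire id and the checksum width. A minimal usage sketch against the patched class (the demo class itself is illustrative, not part of the commit):

import org.apache.hadoop.util.DataChecksum;

public class ChecksumTypeDemo {
  public static void main(String[] args) {
    // Callers now pass the enum instead of an int constant.
    DataChecksum crc32c = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);

    // The checksum width is carried by the enum, so getChecksumSize() is type.size.
    System.out.println(crc32c.getChecksumSize());  // 4
    System.out.println(crc32c);                    // DataChecksum(type=CRC32C, chunkSize=512)

    // Wire ids still round-trip through the new Type.valueOf(int) overload;
    // ids outside [0, 3) throw IllegalArgumentException.
    DataChecksum.Type t = DataChecksum.Type.valueOf(2);
    System.out.println(t.id + " " + t.size);       // 2 4
  }
}

Note that toString() no longer needs the NAMES lookup: printing the enum yields its name directly.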
+ 10 - 11
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java

@@ -35,13 +35,13 @@ public class TestDataChecksum {
   private static final int DATA_TRAILER_IN_BUFFER = 3;
   
   private static final int BYTES_PER_CHUNK = 512;
-  private static final int CHECKSUM_TYPES[] = new int[] {
-    DataChecksum.CHECKSUM_CRC32, DataChecksum.CHECKSUM_CRC32C
+  private static final DataChecksum.Type CHECKSUM_TYPES[] = {
+    DataChecksum.Type.CRC32, DataChecksum.Type.CRC32C
   };
   
   @Test
   public void testBulkOps() throws Exception {
-    for (int type : CHECKSUM_TYPES) {
+    for (DataChecksum.Type type : CHECKSUM_TYPES) {
       System.err.println(
           "---- beginning tests with checksum type " + type  + "----");
       DataChecksum checksum = DataChecksum.newDataChecksum(
@@ -118,21 +118,20 @@ public class TestDataChecksum {
   @Test
   public void testEquality() {
     assertEquals(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512),
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512));
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512),
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
     assertFalse(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512).equals(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 1024)));
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).equals(
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 1024)));
     assertFalse(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512).equals(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32C, 512)));        
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).equals(
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512)));        
   }
   
   @Test
   public void testToString() {
     assertEquals("DataChecksum(type=CRC32, chunkSize=512)",
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512)
-          .toString());
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).toString());
   }
 
   private static void corruptBufferOffset(ByteBuffer buf, int offset) {

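The test changes are mechanical, but they confirm a property visible in the diff above: the serialized header layout is untouched. getHeader() still emits the one-byte type id followed by the four-byte bytesPerChecksum, so data written before this patch parses identically after it. A round-trip sketch (my own driver, not from the test suite):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.util.DataChecksum;

public class HeaderRoundTrip {
  public static void main(String[] args) throws IOException {
    DataChecksum written = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512);

    // Five bytes: [type.id][bytesPerChecksum as a big-endian int].
    byte[] header = written.getHeader();

    // Reading the header back reconstructs an equal checksum object,
    // which is exactly what testEquality relies on.
    DataChecksum read = DataChecksum.newDataChecksum(
        new DataInputStream(new ByteArrayInputStream(header)));
    System.out.println(written.equals(read));  // true
  }
}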
+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java

@@ -259,7 +259,7 @@ class BlockReaderLocal implements BlockReader {
       long length, BlockLocalPathInfo pathinfo, FileInputStream dataIn)
       throws IOException {
     this(conf, hdfsfile, block, token, startOffset, length, pathinfo,
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_NULL, 4), false,
+        DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 4), false,
         dataIn, startOffset, null);
   }
 

+ 11 - 12
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java

@@ -171,7 +171,7 @@ public class DFSClient implements java.io.Closeable {
     final int maxBlockAcquireFailures;
     final int confTime;
     final int ioBufferSize;
-    final int checksumType;
+    final DataChecksum.Type checksumType;
     final int bytesPerChecksum;
     final int writePacketSize;
     final int socketTimeout;
@@ -229,18 +229,17 @@ public class DFSClient implements java.io.Closeable {
           DFS_CLIENT_USE_LEGACY_BLOCKREADER_DEFAULT);
     }
 
-    private int getChecksumType(Configuration conf) {
-      String checksum = conf.get(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY,
+    private DataChecksum.Type getChecksumType(Configuration conf) {
+      final String checksum = conf.get(
+          DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY,
           DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
-      if ("CRC32".equals(checksum)) {
-        return DataChecksum.CHECKSUM_CRC32;
-      } else if ("CRC32C".equals(checksum)) {
-        return DataChecksum.CHECKSUM_CRC32C;
-      } else if ("NULL".equals(checksum)) {
-        return DataChecksum.CHECKSUM_NULL;
-      } else {
-        LOG.warn("Bad checksum type: " + checksum + ". Using default.");
-        return DataChecksum.CHECKSUM_CRC32C;
+      try {
+        return DataChecksum.Type.valueOf(checksum);
+      } catch(IllegalArgumentException iae) {
+        LOG.warn("Bad checksum type: " + checksum + ". Using default "
+            + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
+        return DataChecksum.Type.valueOf(
+            DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); 
       }
     }
 

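In DFSClient, the hand-rolled if/else chain over config strings becomes a single Enum.valueOf with a fallback to the configured default, so any future Type constant is accepted without touching this method. A self-contained sketch of the pattern (parseChecksumType is a hypothetical helper; the real code pulls the string from DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY on the Configuration):

import org.apache.hadoop.util.DataChecksum;

public class ChecksumConfigDemo {
  // Hypothetical stand-in for DFSClient.Conf.getChecksumType(Configuration).
  static DataChecksum.Type parseChecksumType(String configured, String defaultName) {
    try {
      return DataChecksum.Type.valueOf(configured);
    } catch (IllegalArgumentException iae) {
      // The real code logs a warning here before falling back to the default.
      return DataChecksum.Type.valueOf(defaultName);
    }
  }

  public static void main(String[] args) {
    System.out.println(parseChecksumType("CRC32", "CRC32C"));  // CRC32
    System.out.println(parseChecksumType("bogus", "CRC32C"));  // CRC32C
  }
}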
+ 2 - 18
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java

@@ -31,9 +31,6 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
 
-import com.google.common.collect.BiMap;
-import com.google.common.collect.ImmutableBiMap;
-
 
 /**
  * Static utilities for dealing with the protocol buffers used by the
@@ -42,19 +39,6 @@ import com.google.common.collect.ImmutableBiMap;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public abstract class DataTransferProtoUtil {
-
-  /**
-   * Map between the internal DataChecksum identifiers and the protobuf-
-   * generated identifiers on the wire.
-   */
-  static BiMap<Integer, ChecksumProto.ChecksumType> checksumTypeMap =
-    ImmutableBiMap.<Integer, ChecksumProto.ChecksumType>builder()
-      .put(DataChecksum.CHECKSUM_CRC32, ChecksumProto.ChecksumType.CRC32)
-      .put(DataChecksum.CHECKSUM_CRC32C, ChecksumProto.ChecksumType.CRC32C)
-      .put(DataChecksum.CHECKSUM_NULL, ChecksumProto.ChecksumType.NULL)
-      .build();
-
-  
   static BlockConstructionStage fromProto(
       OpWriteBlockProto.BlockConstructionStage stage) {
     return BlockConstructionStage.valueOf(BlockConstructionStage.class,
@@ -68,7 +52,7 @@ public abstract class DataTransferProtoUtil {
   }
 
   public static ChecksumProto toProto(DataChecksum checksum) {
-    ChecksumType type = checksumTypeMap.get(checksum.getChecksumType());
+    ChecksumType type = ChecksumType.valueOf(checksum.getChecksumType().name());
     if (type == null) {
       throw new IllegalArgumentException(
           "Can't convert checksum to protobuf: " + checksum);
@@ -84,7 +68,7 @@ public abstract class DataTransferProtoUtil {
     if (proto == null) return null;
 
     int bytesPerChecksum = proto.getBytesPerChecksum();
-    int type = checksumTypeMap.inverse().get(proto.getType());
+    DataChecksum.Type type = DataChecksum.Type.valueOf(proto.getType().name());
     
     return DataChecksum.newDataChecksum(type, bytesPerChecksum);
   }

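Dropping the Guava BiMap works because DataChecksum.Type and the protobuf ChecksumType enum share constant names (NULL, CRC32, CRC32C), so valueOf(other.name()) converts in either direction; keep in mind Enum.valueOf throws IllegalArgumentException on an unknown name rather than returning null. A sketch with stand-in enums (both enums below are illustrative substitutes for the real ones):

public class EnumBridgeDemo {
  // Stand-in for DataChecksum.Type.
  enum Type { NULL, CRC32, CRC32C }

  // Stand-in for the protobuf-generated ChecksumProto.ChecksumType.
  enum ChecksumType { NULL, CRC32, CRC32C }

  public static void main(String[] args) {
    // toProto direction: internal type -> wire type, keyed by name.
    ChecksumType wire = ChecksumType.valueOf(Type.CRC32C.name());

    // fromProto direction: same trick in reverse.
    Type internal = Type.valueOf(wire.name());

    System.out.println(wire + " / " + internal);  // CRC32C / CRC32C
  }
}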
+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java

@@ -234,8 +234,7 @@ class BlockSender implements java.io.Closeable {
       } else {
         LOG.warn("Could not find metadata file for " + block);
         // This only decides the buffer size. Use BUFFER_SIZE?
-        csum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_NULL,
-            16 * 1024);
+        csum = DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 16 * 1024);
       }
 
       /*

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java

@@ -73,7 +73,7 @@ public class TestDataTransferProtocol extends TestCase {
                     "org.apache.hadoop.hdfs.TestDataTransferProtocol");
 
   private static final DataChecksum DEFAULT_CHECKSUM =
-    DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32C, 512);
+    DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);
   
   DatanodeID datanode;
   InetSocketAddress dnAddr;

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java

@@ -88,8 +88,8 @@ public class SimulatedFSDataset implements FSDatasetInterface {
   
   static final byte[] nullCrcFileData;
   static {
-    DataChecksum checksum = DataChecksum.newDataChecksum( DataChecksum.
-                              CHECKSUM_NULL, 16*1024 );
+    DataChecksum checksum = DataChecksum.newDataChecksum(
+        DataChecksum.Type.NULL, 16*1024 );
     byte[] nullCrcHeader = checksum.getHeader();
     nullCrcFileData =  new byte[2 + nullCrcHeader.length];
     nullCrcFileData[0] = (byte) ((BlockMetadataHeader.VERSION >>> 8) & 0xff);

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java

@@ -539,7 +539,7 @@ public class TestBlockRecovery {
     BlockWriteStreams streams = null;
     try {
       streams = replicaInfo.createStreams(true,
-          DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512));
+          DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
       streams.checksumOut.write('a');
       dn.data.initReplicaRecovery(new RecoveringBlock(block, null, RECOVERY_ID+1));
       try {

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java

@@ -142,7 +142,7 @@ public class TestDiskError {
     DataOutputStream out = new DataOutputStream(s.getOutputStream());
 
     DataChecksum checksum = DataChecksum.newDataChecksum(
-        DataChecksum.CHECKSUM_CRC32, 512);
+        DataChecksum.Type.CRC32, 512);
     new Sender(out).writeBlock(block.getBlock(),
         BlockTokenSecretManager.DUMMY_TOKEN, "",
         new DatanodeInfo[0], null,

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java

@@ -65,7 +65,7 @@ public class TestSimulatedFSDataset extends TestCase {
       // data written
       ReplicaInPipelineInterface bInfo = fsdataset.createRbw(b);
       BlockWriteStreams out = bInfo.createStreams(true,
-          DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512));
+          DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
       try {
         OutputStream dataOut  = out.dataOut;
         assertEquals(0, fsdataset.getLength(b));
@@ -115,7 +115,7 @@ public class TestSimulatedFSDataset extends TestCase {
     short version = metaDataInput.readShort();
     assertEquals(BlockMetadataHeader.VERSION, version);
     DataChecksum checksum = DataChecksum.newDataChecksum(metaDataInput);
-    assertEquals(DataChecksum.CHECKSUM_NULL, checksum.getChecksumType());
+    assertEquals(DataChecksum.Type.NULL, checksum.getChecksumType());
     assertEquals(0, checksum.getChecksumSize());  
   }
 

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java

@@ -53,7 +53,7 @@ public class IFileInputStream extends InputStream {
    */
   public IFileInputStream(InputStream in, long len) {
     this.in = in;
-    sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 
+    sum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 
         Integer.MAX_VALUE);
     checksumSize = sum.getChecksumSize();
     length = len;

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileOutputStream.java

@@ -49,7 +49,7 @@ public class IFileOutputStream extends FilterOutputStream {
    */
   public IFileOutputStream(OutputStream out) {
     super(out);
-    sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32,
+    sum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32,
         Integer.MAX_VALUE);
     barray = new byte[sum.getChecksumSize()];
   }