MAPREDUCE-3803. Merge change r1240441 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1240444 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 13 years ago
commit b6bcde5ce9

+ 3 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -638,6 +638,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3417. Fixed job-access-controls to work with MR AM and
     JobHistoryServer web-apps. (Jonathan Eagles via vinodkv)
 
+    MAPREDUCE-3803. Fix broken build of raid contrib due to HDFS-2864.
+    (Ravi Prakash via suresh)
+
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES

+ 1 - 1
hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java

@@ -108,7 +108,7 @@ public class RaidBlockSender implements java.io.Closeable {
       BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
       short version = header.getVersion();
 
-        if (version != FSDataset.METADATA_VERSION) {
+        if (version != BlockMetadataHeader.VERSION) {
          LOG.warn("Wrong version (" + version + ") for metadata file for "
              + block + " ignoring ...");
        }
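
The hunk above is the core of the fix: HDFS-2864 removed FSDataset.METADATA_VERSION, so the raid contrib's version check now reads the constant from BlockMetadataHeader instead. Below is a minimal sketch of the same check in isolation, not part of this commit: the class and its metadata-file parameter are hypothetical, and it sits in RaidBlockSender's package so readHeader()/getVersion() resolve exactly as in the hunk.

// A minimal sketch (not part of this commit) of the corrected version check.
// Placed in the same package as RaidBlockSender; the metaFile parameter is
// hypothetical.
package org.apache.hadoop.hdfs.server.datanode;

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class RaidMetaVersionCheck {
  public static boolean isCurrentVersion(File metaFile) throws IOException {
    DataInputStream checksumIn =
        new DataInputStream(new FileInputStream(metaFile));
    try {
      BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
      short version = header.getVersion();
      // Compare against BlockMetadataHeader.VERSION; FSDataset.METADATA_VERSION
      // no longer exists after HDFS-2864.
      return version == BlockMetadataHeader.VERSION;
    } finally {
      checksumIn.close();
    }
  }
}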

+ 2 - 2
hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java

@@ -51,7 +51,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
-import org.apache.hadoop.hdfs.server.datanode.FSDataset;
+import org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader;
 import org.apache.hadoop.hdfs.server.datanode.RaidBlockSender;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -646,7 +646,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
       DataOutputStream mdOut = new DataOutputStream(mdOutBase);
 
       // First, write out the version.
-      mdOut.writeShort(FSDataset.METADATA_VERSION);
+      mdOut.writeShort(BlockMetadataHeader.VERSION);
 
       // Create a summer and write out its header.
       int bytesPerChecksum = conf.getInt("io.bytes.per.checksum", 512);
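
This second hunk mirrors the read side: when BlockFixer reconstructs a block's metadata file, it now writes BlockMetadataHeader.VERSION as the leading short. The sketch below is not from this commit; only the writeShort(BlockMetadataHeader.VERSION) line corresponds to what the diff changes, while the output stream and the CRC32/bytesPerChecksum choices are assumptions about how the surrounding code builds its checksum.

// A small sketch (not from this commit) of writing a block metadata header:
// a version short followed by a checksum header. Stream setup and checksum
// parameters here are assumptions, not BlockFixer's actual code.
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader;
import org.apache.hadoop.util.DataChecksum;

public class RaidMetaHeaderWriter {
  public static void writeHeader(File metaFile, int bytesPerChecksum)
      throws IOException {
    DataOutputStream mdOut = new DataOutputStream(new FileOutputStream(metaFile));
    try {
      // First, write out the version (now taken from BlockMetadataHeader).
      mdOut.writeShort(BlockMetadataHeader.VERSION);

      // Create a checksum ("summer") and write out its header.
      DataChecksum sum =
          DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, bytesPerChecksum);
      sum.writeHeader(mdOut);
    } finally {
      mdOut.close();
    }
  }
}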