
HDFS-17190. EC: Fix bug of OIV processing XAttr. (#6067). Contributed by Shuyan Zhang.

Signed-off-by: He Xiaoqiao <hexiaoqiao@apache.org>
zhangshuyan, 1 year ago
parent commit 9e489b9ab5
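
What the bug was: before this patch, PBImageTextWriter detected the erasure-coding xattr with a substring test, XATTR_ERASURECODING_POLICY.contains(xattr.getName()). Any xattr whose namespace-stripped name happens to be a substring of that constant matched as well, and its value was then fed to the EC-policy parser. A minimal sketch of the false positive (the constant's literal value is an assumption here, taken from HdfsServerConstants; the demo class is hypothetical):

    // Sketch of the pre-patch false positive, assuming the constant's
    // value "system.hdfs.erasurecoding.policy" from HdfsServerConstants.
    public class SubstringMatchDemo {
      static final String XATTR_ERASURECODING_POLICY =
          "system.hdfs.erasurecoding.policy";

      public static void main(String[] args) {
        // XAttr#getName() returns the namespace-stripped name, so a
        // user xattr "system.hdfs" arrives here as plain "hdfs".
        String strippedName = "hdfs";
        // Old check: substring containment, wrongly true for "hdfs".
        System.out.println(XATTR_ERASURECODING_POLICY.contains(strippedName));
        // Prints: true
      }
    }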

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java

@@ -72,7 +72,7 @@ public enum XAttrFormat implements LongBitFormat.Enum {
     return SerialNumberManager.XATTR.getString(nid);
   }
 
-  static int toInt(XAttr a) {
+  public static int toInt(XAttr a) {
     int nid = SerialNumberManager.XATTR.getSerialNumber(a.getName());
     int nsOrd = a.getNameSpace().ordinal();
     long value = NS.BITS.combine(nsOrd & NS_MASK, 0L);
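
Note: toInt packs an xattr's namespace ordinal and name serial number into the compact integer form stored in the fsimage. It is widened from package-private to public so the new test below can hand-encode an XAttrCompactProto via XAttrFormat.toInt.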

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java

@@ -52,6 +52,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.permission.PermissionStatus;
+import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
 import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode;
@@ -514,6 +515,8 @@ abstract class PBImageTextWriter implements Closeable {
   private File filename;
   private int numThreads;
   private String parallelOutputFile;
+  private final XAttr ecXAttr =
+      XAttrHelper.buildXAttr(XATTR_ERASURECODING_POLICY);
 
   /**
    * Construct a PB FsImage writer to generate text file.
@@ -1040,7 +1043,7 @@ abstract class PBImageTextWriter implements Closeable {
     List<XAttr> xattrs =
         FSImageFormatPBINode.Loader.loadXAttrs(xattrFeatureProto, stringTable);
     for (XAttr xattr : xattrs) {
-      if (XATTR_ERASURECODING_POLICY.contains(xattr.getName())){
+      if (xattr.equalsIgnoreValue(ecXAttr)){
         try{
           ByteArrayInputStream bIn = new ByteArrayInputStream(xattr.getValue());
           DataInputStream dIn = new DataInputStream(bIn);
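
The fix builds the EC-policy xattr once with XAttrHelper.buildXAttr(XATTR_ERASURECODING_POLICY) and compares each candidate via XAttr#equalsIgnoreValue, i.e. an exact match on namespace and name instead of substring containment. A sketch of what the comparison amounts to (the helper below is hypothetical; the patch calls equalsIgnoreValue directly):

    import org.apache.hadoop.fs.XAttr;

    // What the new check amounts to (sketch): exact match on namespace
    // and stripped name, with the value ignored.
    class EcXAttrMatch {
      static boolean isEcPolicyXAttr(XAttr candidate, XAttr ecPolicyXAttr) {
        return candidate.getNameSpace() == ecPolicyXAttr.getNameSpace()
            && candidate.getName().equals(ecPolicyXAttr.getName());
      }
    }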

+ 44 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

@@ -29,6 +29,7 @@ import javax.xml.transform.stream.StreamResult;
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -66,6 +67,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.SafeModeAction;
+import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
@@ -73,20 +75,26 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockType;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto;
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
+import org.apache.hadoop.hdfs.server.namenode.XAttrFormat;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.net.NetUtils;
@@ -719,7 +727,13 @@ public class TestOfflineImageViewer {
         .build();
   }
 
-  private FsImageProto.INodeSection.INode createSampleDirInode() {
+  private FsImageProto.INodeSection.INode createSampleDirInode()
+      throws IOException {
+    return createSampleDirInode(false);
+  }
+
+  private FsImageProto.INodeSection.INode createSampleDirInode(
+      boolean buildXAttr) throws IOException {
     FsImageProto.INodeSection.AclFeatureProto.Builder acl =
         FsImageProto.INodeSection.AclFeatureProto.newBuilder()
             .addEntries(2);
@@ -729,6 +743,19 @@ public class TestOfflineImageViewer {
             .setNsQuota(700)
             .setModificationTime(SAMPLE_TIMESTAMP)
             .setAcl(acl);
+    if (buildXAttr) {
+      ByteArrayOutputStream bOut = new ByteArrayOutputStream();
+      DataOutputStream dOut = new DataOutputStream(bOut);
+      WritableUtils.writeString(dOut, "test-value");
+      XAttr a = XAttrHelper.buildXAttr("system.hdfs", bOut.toByteArray());
+      XAttrFeatureProto.Builder b = XAttrFeatureProto.newBuilder();
+      XAttrCompactProto.Builder xAttrCompactBuilder = XAttrCompactProto.newBuilder();
+      int v = XAttrFormat.toInt(a);
+      xAttrCompactBuilder.setName(v);
+      xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue()));
+      b.addXAttrs(xAttrCompactBuilder.build());
+      directory.setXAttrs(b);
+    }
 
     return FsImageProto.INodeSection.INode.newBuilder()
         .setType(FsImageProto.INodeSection.INode.Type.DIRECTORY)
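
The sample name system.hdfs is the point of the regression test: after XAttrHelper.buildXAttr strips the "system" namespace prefix, the remaining name "hdfs" is a substring of "hdfs.erasurecoding.policy", so the old contains() check would have misclassified this xattr and tried to decode "test-value" as an erasure-coding policy.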
@@ -754,6 +781,11 @@ public class TestOfflineImageViewer {
 
   private PBImageDelimitedTextWriter createDelimitedWriterSpy()
       throws IOException {
+    return createDelimitedWriterSpy(false);
+  }
+
+  private PBImageDelimitedTextWriter createDelimitedWriterSpy(boolean printECPolicy)
+      throws IOException {
     FsPermission fsPermission = new FsPermission(
         FsAction.ALL,
         FsAction.WRITE_EXECUTE,
@@ -764,7 +796,9 @@ public class TestOfflineImageViewer {
         fsPermission);
 
     PBImageDelimitedTextWriter writer = new
-        PBImageDelimitedTextWriter(null, ",", "");
+        PBImageDelimitedTextWriter(null, ",", "", false,
+        printECPolicy, 1, "-", new Configuration());
+
     PBImageDelimitedTextWriter writerSpy = spy(writer);
     when(writerSpy.getPermission(anyLong())).thenReturn(permStatus);
     return writerSpy;
@@ -786,6 +820,14 @@ public class TestOfflineImageViewer {
             createSampleDirInode()));
   }
 
+  @Test
+  public void testECXAttr() throws IOException {
+    assertEquals("/path/dir,0,2000-01-01 00:00,1970-01-01 00:00" +
+            ",0,0,0,700,1000,drwx-wx-w-+,user_1,group_1,-",
+        createDelimitedWriterSpy(true).getEntry("/path/",
+            createSampleDirInode(true)));
+  }
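
With printECPolicy enabled the delimited writer emits an extra erasure-coding-policy column; the trailing "-" in the expected row marks an inode with no EC policy, confirming the foreign system.hdfs xattr is no longer reported as one.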
+
   @Test
   public void testWriterOutputEntryBuilderForSymlink() throws IOException {
     assertEquals("/path/sym,0,2000-01-01 00:00,2000-01-01 00:00" +