
HDFS-13744. OIV tool should better handle control characters present in file or directory names. Contributed by Zsolt Venczel.

Sean Mackrory 6 years ago
parent
commit
410dd3faa5

+ 15 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageDelimitedTextWriter.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode;
@@ -71,9 +73,19 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     buffer.append(field);
   }
 
+  static final String CRLF = StringUtils.CR + StringUtils.LF;
+
   private void append(StringBuffer buffer, String field) {
     buffer.append(delimiter);
-    buffer.append(field);
+
+    String escapedField = StringEscapeUtils.escapeCsv(field);
+    if (escapedField.contains(CRLF)) {
+      escapedField = escapedField.replace(CRLF, "%x0D%x0A");
+    } else if (escapedField.contains(StringUtils.LF)) {
+      escapedField = escapedField.replace(StringUtils.LF, "%x0A");
+    }
+
+    buffer.append(escapedField);
   }
 
   @Override
@@ -82,7 +94,7 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     String inodeName = inode.getName().toStringUtf8();
     Path path = new Path(parent.isEmpty() ? "/" : parent,
       inodeName.isEmpty() ? "/" : inodeName);
-    buffer.append(path.toString());
+    append(buffer, path.toString());
     PermissionStatus p = null;
     boolean isDir = false;
     boolean hasAcl = false;
@@ -136,7 +148,7 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     append(buffer, dirString + p.getPermission().toString() + aclString);
     append(buffer, p.getUserName());
     append(buffer, p.getGroupName());
-    return buffer.toString();
+    return buffer.substring(1);
   }
 
   @Override

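For illustration only (not part of the commit): a minimal, self-contained sketch of the escaping now applied in PBImageDelimitedTextWriter#append, assuming commons-lang3 and commons-text are on the classpath. The class name EscapeSketch is hypothetical.

    import org.apache.commons.lang3.StringUtils;
    import org.apache.commons.text.StringEscapeUtils;

    public class EscapeSketch {
      static final String CRLF = StringUtils.CR + StringUtils.LF;

      // Mirrors the logic added above: CSV-escape the field, then encode
      // any embedded line breaks so every inode stays on a single line.
      static String escape(String field) {
        String escaped = StringEscapeUtils.escapeCsv(field);
        if (escaped.contains(CRLF)) {
          escaped = escaped.replace(CRLF, "%x0D%x0A");
        } else if (escaped.contains(StringUtils.LF)) {
          escaped = escaped.replace(StringUtils.LF, "%x0A");
        }
        return escaped;
      }

      public static void main(String[] args) {
        // A path containing a bare LF becomes a quoted, single-line field.
        System.out.println(escape("/dirContainingNewLineChar\nhere"));
        // Prints: "/dirContainingNewLineChar%x0Ahere"
      }
    }

The printed value matches the key the test below expects in writtenFiles, since escapeCsv quotes any field that contains a line break before the writer substitutes the %x.. tokens.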
+ 17 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

@@ -27,6 +27,8 @@ import static org.apache.hadoop.fs.permission.AclEntryType.USER;
 import static org.apache.hadoop.fs.permission.FsAction.ALL;
 import static org.apache.hadoop.fs.permission.FsAction.EXECUTE;
 import static org.apache.hadoop.fs.permission.FsAction.READ_EXECUTE;
+
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.aclEntry;
@@ -209,6 +211,21 @@ public class TestOfflineImageViewer {
       writtenFiles.put(entityRefXMLDir.toString(),
           hdfs.getFileStatus(entityRefXMLDir));
 
+      //Create directories with new line characters
+      Path newLFDir = new Path("/dirContainingNewLineChar"
+          + StringUtils.LF + "here");
+      hdfs.mkdirs(newLFDir);
+      dirCount++;
+      writtenFiles.put("\"/dirContainingNewLineChar%x0Ahere\"",
+          hdfs.getFileStatus(newLFDir));
+
+      Path newCRLFDir = new Path("/dirContainingNewLineChar"
+          + PBImageDelimitedTextWriter.CRLF + "here");
+      hdfs.mkdirs(newCRLFDir);
+      dirCount++;
+      writtenFiles.put("\"/dirContainingNewLineChar%x0D%x0Ahere\"",
+          hdfs.getFileStatus(newCRLFDir));
+
       //Create a directory with sticky bits
       Path stickyBitDir = new Path("/stickyBit");
       hdfs.mkdirs(stickyBitDir);
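A further illustration (an assumption, not code from this change): because embedded line breaks are now encoded as %x0D%x0A / %x0A, each inode occupies exactly one line of Delimited output, and a consumer can recover the original name if needed. The sample column value mirrors the test expectation above; the class name and decoding helper are hypothetical.

    public class OivPathDecodeSketch {
      public static void main(String[] args) {
        // Escaped path column as it would appear after this change.
        String pathColumn = "\"/dirContainingNewLineChar%x0Ahere\"";

        // Strip the CSV quoting added by StringEscapeUtils.escapeCsv ...
        String unquoted = pathColumn.substring(1, pathColumn.length() - 1);
        // ... then undo the line-break encoding introduced by the writer.
        String original = unquoted.replace("%x0D%x0A", "\r\n")
                                  .replace("%x0A", "\n");

        // Prints the two-line directory name created by the test.
        System.out.println(original);
      }
    }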