Browse Source

HDFS-1109. HFTP supports filenames that contain the character "+".
(Dmytro Molkov via dhruba)



git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/trunk@942847 13f79535-47bb-0310-9956-ffa450edef68

Dhruba Borthakur 15 years ago
parent
commit
98b82a728b

+ 3 - 0
CHANGES.txt

@@ -502,6 +502,9 @@ Release 0.21.0 - Unreleased
     HDFS-1028. Efficient splitting of path components reduces the time
     to load in fsimage by 20%. (Dmytro Molkov via dhruba)
 
+    HDFS-1109. HFTP supports filenames that contain the character "+".
+    (Dmytro Molkov via dhruba)
+
   OPTIMIZATIONS
 
     HDFS-946. NameNode should not return full path name when listing a

+ 2 - 2
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java

@@ -40,8 +40,8 @@ public class ProxyFileDataServlet extends FileDataServlet {
       ClientProtocol nnproxy, HttpServletRequest request) throws IOException,
       URISyntaxException {
     return new URI(request.getScheme(), null, request.getServerName(), request
-        .getServerPort(), "/streamFile", "filename=" + i.getFullName(parent)
-        + "&ugi=" + ugi.getShortUserName(), null);
+        .getServerPort(), "/streamFile" + i.getFullName(parent),
+        "&ugi=" + ugi.getShortUserName(), null);
   }
 
   /** {@inheritDoc} */

+ 2 - 2
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java

@@ -30,12 +30,12 @@ public class ProxyFileForward extends ProxyForwardServlet {
   @Override
   protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
     String path = "/streamFile";
-    path += "?filename=" + request.getPathInfo();
+    path += request.getPathInfo();
     String userID = (String) request.
         getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
     UserGroupInformation ugi = ProxyUtil.getProxyUGIFor(userID);
     if (ugi != null) {
-      path += "&ugi=" + ugi.getShortUserName();
+      path += "?ugi=" + ugi.getShortUserName();
     }
     return path;
   }

+ 8 - 8
src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java

@@ -63,8 +63,8 @@ public class TestAuthorizationFilter extends FilterTestCase {
   }
 
   public void beginPathRestriction(WebRequest theRequest) {
-    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
-        "filename=/nontestdir");
+    theRequest.setURL("proxy-test:0", null, "/streamFile/nontestdir",
+      null,null);
   }
 
   public void testPathRestriction() throws ServletException, IOException {
@@ -91,8 +91,8 @@ public class TestAuthorizationFilter extends FilterTestCase {
   }
 
   public void beginPathPermit(WebRequest theRequest) {
-    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
-        "filename=/data/file");
+    theRequest.setURL("proxy-test:0", null, "/streamFile/data/file",
+      null, null);
   }
 
   public void testPathPermit() throws ServletException, IOException {
@@ -114,8 +114,8 @@ public class TestAuthorizationFilter extends FilterTestCase {
   }
 
   public void beginPathPermitQualified(WebRequest theRequest) {
-    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
-        "filename=/data/file");
+    theRequest.setURL("proxy-test:0", null, "/streamFile/data/file",
+      null, null);
   }
 
   public void testPathPermitQualified() throws ServletException, IOException {
@@ -137,8 +137,8 @@ public class TestAuthorizationFilter extends FilterTestCase {
   }
   
   public void beginPathQualifiediReject(WebRequest theRequest) {
-    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
-        "filename=/data/file");
+    theRequest.setURL("proxy-test:0", null, "/streamFile/data/file",
+      null, null);
   }
 
   public void testPathQualifiedReject() throws ServletException, IOException {

+ 2 - 2
src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java

@@ -76,8 +76,8 @@ public class TestLdapIpDirFilter extends FilterTestCase {
   }
 
   public void beginDoFilter(WebRequest theRequest) {
-    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
-        "filename=/testdir");
+    theRequest.setURL("proxy-test:0", null, "/streamFile/testdir",
+      null, null);
   }
 
   public void testDoFilter() throws ServletException, IOException,

+ 3 - 1
src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java

@@ -236,7 +236,9 @@ public class DatanodeJspHelper {
     else
       startOffset = Long.parseLong(startOffsetStr);
 
-    final String filename=JspHelper.validatePath(req.getParameter("filename"));
+    final String filename=JspHelper.validatePath(
+                          req.getPathInfo() == null ? 
+                          "/" : req.getPathInfo());
     if (filename == null) {
       out.print("Invalid input");
       return;

+ 5 - 4
src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java

@@ -59,8 +59,8 @@ public class FileDataServlet extends DfsServlet {
         "https".equals(scheme)
           ? (Integer)getServletContext().getAttribute("datanode.https.port")
           : host.getInfoPort(),
-            "/streamFile", "filename=" + i.getFullName(parent) + 
-            "&ugi=" + ugi.getShortUserName(), null);
+            "/streamFile" + i.getFullName(parent), 
+            "ugi=" + ugi.getShortUserName(), null);
   }
 
   /** Select a datanode to service this request.
@@ -105,8 +105,9 @@ public class FileDataServlet extends DfsServlet {
                                                     request.getPathInfo() : "/";
       HdfsFileStatus info = nnproxy.getFileInfo(path);
       if ((info != null) && !info.isDir()) {
-        response.sendRedirect(createUri(path, info, ugi, nnproxy,
-              request).toURL().toString());
+        String redirect = createUri(path, info, ugi, nnproxy,
+              request).toURL().toString();
+        response.sendRedirect(redirect);
       } else if (info == null){
         response.sendError(400, "cat: File not found " + path);
       } else {

+ 4 - 3
src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java

@@ -52,7 +52,7 @@ public class StreamFile extends DfsServlet {
   protected DFSClient getDFSClient(HttpServletRequest request)
       throws IOException, InterruptedException {
     final Configuration conf =
-      (Configuration) getServletContext().getAttribute("name.conf");
+      (Configuration) getServletContext().getAttribute("datanode.conf");
     
     UserGroupInformation ugi = getUGI(request, conf);
     DFSClient client = ugi.doAs(new PrivilegedExceptionAction<DFSClient>() {
@@ -67,8 +67,9 @@ public class StreamFile extends DfsServlet {
   
   public void doGet(HttpServletRequest request, HttpServletResponse response)
     throws ServletException, IOException {
-    final String filename = JspHelper.validatePath(
-        request.getParameter("filename"));
+    final String path = request.getPathInfo() != null ? 
+                                        request.getPathInfo() : "/";
+    final String filename = JspHelper.validatePath(path);
     if (filename == null) {
       response.setContentType("text/plain");
       PrintWriter out = response.getWriter();