svn merge -c 1353800 FIXES: HDFS-3516. Check content-type in WebHdfsFileSystem.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1363000 13f79535-47bb-0310-9956-ffa450edef68
Daryn Sharp · 13 years ago · commit 66d5ce6ae9

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -10,6 +10,8 @@ Release 0.23.3 - UNRELEASED
 
   IMPROVEMENTS
 
+    HDFS-3516. Check content-type in WebHdfsFileSystem.  (szetszwo)
+
   OPTIMIZATIONS
 
   BUG FIXES

+ 20 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

@@ -34,6 +34,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
 
+import javax.ws.rs.core.MediaType;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -241,9 +243,23 @@ public class WebHdfsFileSystem extends FileSystem
     return f.isAbsolute()? f: new Path(workingDir, f);
   }
 
-  static Map<?, ?> jsonParse(final InputStream in) throws IOException {
+  static Map<?, ?> jsonParse(final HttpURLConnection c, final boolean useErrorStream
+      ) throws IOException {
+    if (c.getContentLength() == 0) {
+      return null;
+    }
+    final InputStream in = useErrorStream? c.getErrorStream(): c.getInputStream();
     if (in == null) {
-      throw new IOException("The input stream is null.");
+      throw new IOException("The " + (useErrorStream? "error": "input") + " stream is null.");
+    }
+    final String contentType = c.getContentType();
+    if (contentType != null) {
+      final MediaType parsed = MediaType.valueOf(contentType);
+      if (!MediaType.APPLICATION_JSON_TYPE.isCompatible(parsed)) {
+        throw new IOException("Content-Type \"" + contentType
+            + "\" is incompatible with \"" + MediaType.APPLICATION_JSON
+            + "\" (parsed=\"" + parsed + "\")");
+      }
     }
     return (Map<?, ?>)JSON.parse(new InputStreamReader(in));
   }
@@ -254,7 +270,7 @@ public class WebHdfsFileSystem extends FileSystem
     if (code != op.getExpectedHttpResponseCode()) {
       final Map<?, ?> m;
       try {
-        m = jsonParse(conn.getErrorStream());
+        m = jsonParse(conn, true);
       } catch(IOException e) {
         throw new IOException("Unexpected HTTP response: code=" + code + " != "
             + op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
@@ -414,7 +430,7 @@ public class WebHdfsFileSystem extends FileSystem
     final HttpURLConnection conn = httpConnect(op, fspath, parameters);
     try {
       final Map<?, ?> m = validateResponse(op, conn);
-      return m != null? m: jsonParse(conn.getInputStream());
+      return m != null? m: jsonParse(conn, false);
     } finally {
       conn.disconnect();
     }
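
Side note on the new guard (a sketch for context, not part of the patch): the Content-Type check relies on javax.ws.rs.core.MediaType, whose isCompatible comparison ignores parameters such as charset and treats wildcards as matching. A minimal standalone illustration of that behavior follows, assuming a JAX-RS implementation (e.g. Jersey, which the HDFS build already pulls in for WebHDFS) is on the classpath, since MediaType.valueOf delegates header parsing to the JAX-RS RuntimeDelegate.

import javax.ws.rs.core.MediaType;

// Standalone illustration of the compatibility test used in jsonParse.
// Requires a JAX-RS implementation (e.g. Jersey) on the classpath because
// MediaType.valueOf parses the header via the JAX-RS RuntimeDelegate.
public class ContentTypeCheckSketch {
  static boolean isJsonCompatible(String contentType) {
    final MediaType parsed = MediaType.valueOf(contentType);
    return MediaType.APPLICATION_JSON_TYPE.isCompatible(parsed);
  }

  public static void main(String[] args) {
    System.out.println(isJsonCompatible("application/json"));                // true
    System.out.println(isJsonCompatible("application/json; charset=UTF-8")); // true, parameters ignored
    System.out.println(isJsonCompatible("*/*"));                             // true, wildcard matches
    System.out.println(isJsonCompatible("text/html"));                       // false -> jsonParse throws IOException
    System.out.println(isJsonCompatible("application/octet-stream"));        // false -> jsonParse throws IOException
  }
}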

+ 20 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

@@ -278,6 +278,10 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     final Path root = new Path("/");
     final Path dir = new Path("/test/testUrl");
     assertTrue(webhdfs.mkdirs(dir));
+    final Path file = new Path("/test/file");
+    final FSDataOutputStream out = webhdfs.create(file);
+    out.write(1);
+    out.close();
 
     {//test GETHOMEDIRECTORY
       final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root);
@@ -339,5 +343,21 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
       assertEquals((short)0755, webhdfs.getFileStatus(dir).getPermission().toShort());
       conn.disconnect();
     }
+
+    {//test jsonParse with non-json type.
+      final HttpOpParam.Op op = GetOpParam.Op.OPEN;
+      final URL url = webhdfs.toUrl(op, file);
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.connect();
+
+      try {
+        WebHdfsFileSystem.jsonParse(conn, false);
+        fail();
+      } catch(IOException ioe) {
+        WebHdfsFileSystem.LOG.info("GOOD", ioe);
+      }
+      conn.disconnect();
+    }
   }
 }
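
For context on the new test block in TestWebHdfsFileSystemContract (a hedged sketch, not part of the patch): op=OPEN on a regular file is redirected to a datanode that streams the file's raw bytes, typically with Content-Type application/octet-stream, so jsonParse now fails fast instead of feeding binary data to the JSON parser. The host, port and path below are placeholders.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical standalone probe (placeholder host/port/path): prints the
// Content-Type that an OPEN request actually returns. With this patch,
// WebHdfsFileSystem.jsonParse(conn, false) would reject such a response
// instead of trying to parse the file bytes as JSON.
public class OpenContentTypeProbe {
  public static void main(String[] args) throws IOException {
    final URL url = new URL("http://namenode:50070/webhdfs/v1/test/file?op=OPEN");
    final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    conn.connect();
    try {
      System.out.println("Content-Type: " + conn.getContentType());
    } finally {
      conn.disconnect();
    }
  }
}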

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java

@@ -49,7 +49,7 @@ public class WebHdfsTestUtil {
 
   public static WebHdfsFileSystem getWebHdfsFileSystemAs(
       final UserGroupInformation ugi, final Configuration conf
-      ) throws IOException, URISyntaxException, InterruptedException {
+      ) throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
       @Override
       public WebHdfsFileSystem run() throws Exception {
@@ -70,7 +70,7 @@ public class WebHdfsTestUtil {
       final int expectedResponseCode) throws IOException {
     conn.connect();
     Assert.assertEquals(expectedResponseCode, conn.getResponseCode());
-    return WebHdfsFileSystem.jsonParse(conn.getInputStream());
+    return WebHdfsFileSystem.jsonParse(conn, false);
   }
   
   public static HttpURLConnection twoStepWrite(HttpURLConnection conn,
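
The WebHdfsTestUtil change above only narrows the throws clause of getWebHdfsFileSystemAs, so callers need to handle just IOException and InterruptedException. A hedged sketch of a call site under the new signature (the user name is a placeholder, and the configuration is assumed to point at a running test cluster):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical call site (not part of the patch): with the narrowed throws
// clause, only IOException and InterruptedException must be declared.
public class GetWebHdfsAsUserSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    final UserGroupInformation ugi = UserGroupInformation.createRemoteUser("testuser"); // placeholder user
    final Configuration conf = new Configuration();
    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf);
    // ... exercise webhdfs against the test cluster ...
  }
}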