
HDFS-5893. HftpFileSystem.RangeHeaderUrlOpener uses the default URLConnectionFactory which does not import SSL certificates. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1569477 13f79535-47bb-0310-9956-ffa450edef68
Jing Zhao, 11 years ago
Parent commit: a845a18c67
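
This commit addresses an SSL failure on the hsftp read path: RangeHeaderUrlOpener built its connections from the static URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY, which is never loaded with the filesystem's SSL certificates, so byte-range reads over HTTPS could not complete the handshake. The fix injects the filesystem's own, SSL-configured factory into the opener. A minimal sketch of the pattern, using simplified stand-ins for the real Hadoop types:

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Simplified stand-in for org.apache.hadoop.hdfs.web.URLConnectionFactory.
    interface ConnectionFactory {
      HttpURLConnection openConnection(URL url) throws IOException;
    }

    class RangeOpener {
      // Before the fix, the opener hard-wired a process-wide default factory
      // that knows nothing about the filesystem's SSL trust material. After
      // the fix, the owning filesystem injects its configured factory here.
      private final ConnectionFactory connFactory;
      private final URL url;

      RangeOpener(ConnectionFactory connFactory, URL url) {
        this.connFactory = connFactory;
        this.url = url;
      }

      HttpURLConnection openConnection() throws IOException {
        return connFactory.openConnection(url);
      }
    }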

+ 4 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -435,6 +435,10 @@ Release 2.4.0 - UNRELEASED
 
     HDFS-5803. TestBalancer.testBalancer0 fails. (Chen He via kihwal)
 
+    HDFS-5893. HftpFileSystem.RangeHeaderUrlOpener uses the default
+    URLConnectionFactory which does not import SSL certificates. (Haohui Mai via
+    jing9)
+
   BREAKDOWN OF HDFS-5698 SUBTASKS AND RELATED JIRAS
 
     HDFS-5717. Save FSImage header in protobuf. (Haohui Mai via jing9)

+ 4 - 10
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java

@@ -27,7 +27,6 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -61,18 +60,13 @@ public class FileDataServlet extends DfsServlet {
     } else {
       hostname = host.getIpAddr();
     }
-    int port = host.getInfoPort();
-    if ("https".equals(scheme)) {
-      final Integer portObject = (Integer) getServletContext().getAttribute(
-          DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
-      if (portObject != null) {
-        port = portObject;
-      }
-    }
+
+    int port = "https".equals(scheme) ? host.getInfoSecurePort() : host
+        .getInfoPort();
 
     String dtParam = "";
     if (dt != null) {
-      dtParam=JspHelper.getDelegationTokenUrlParam(dt);
+      dtParam = JspHelper.getDelegationTokenUrlParam(dt);
     }
 
     // Add namenode address to the url params
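
The FileDataServlet change drops the indirect servlet-context lookup of DFS_DATANODE_HTTPS_PORT_KEY: the datanode descriptor now reports its secure port itself. A hedged sketch of the resulting selection logic, wrapped in an illustrative helper (the helper and class names are not from the patch):

    import org.apache.hadoop.hdfs.protocol.DatanodeID;

    class PortSelection {
      // Pick the datanode's HTTPS info port for secure requests and its
      // plain HTTP info port otherwise, as the patched servlet does.
      static int infoPort(String scheme, DatanodeID host) {
        return "https".equals(scheme) ? host.getInfoSecurePort()
                                      : host.getInfoPort();
      }
    }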

+ 8 - 6
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java

@@ -344,14 +344,15 @@ public class HftpFileSystem extends FileSystem
   }
 
   static class RangeHeaderUrlOpener extends ByteRangeInputStream.URLOpener {
-    URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY;
+    private final URLConnectionFactory connFactory;
 
-    RangeHeaderUrlOpener(final URL url) {
+    RangeHeaderUrlOpener(URLConnectionFactory connFactory, final URL url) {
       super(url);
+      this.connFactory = connFactory;
     }
 
     protected HttpURLConnection openConnection() throws IOException {
-      return (HttpURLConnection)connectionFactory.openConnection(url);
+      return (HttpURLConnection)connFactory.openConnection(url);
     }
 
     /** Use HTTP Range header for specifying offset. */
@@ -381,8 +382,9 @@ public class HftpFileSystem extends FileSystem
       super(o, r);
     }
 
-    RangeHeaderInputStream(final URL url) {
-      this(new RangeHeaderUrlOpener(url), new RangeHeaderUrlOpener(null));
+    RangeHeaderInputStream(URLConnectionFactory connFactory, final URL url) {
+      this(new RangeHeaderUrlOpener(connFactory, url),
+          new RangeHeaderUrlOpener(connFactory, null));
     }
 
     @Override
@@ -397,7 +399,7 @@ public class HftpFileSystem extends FileSystem
     String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
     String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
     URL u = getNamenodeURL(path, query);
-    return new FSDataInputStream(new RangeHeaderInputStream(u));
+    return new FSDataInputStream(new RangeHeaderInputStream(connectionFactory, u));
   }
 
   @Override
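
Passing the factory through the constructor chain matters twice over: RangeHeaderInputStream creates two openers, one bound to the namenode URL and one (constructed with null) that is later pointed at the datanode's redirect target, and in an hsftp deployment both endpoints are HTTPS. A hedged fragment showing the wiring, with factory and u standing in for the surrounding method's connectionFactory and URL:

    // Both openers share the same SSL-aware factory; the null-URL opener
    // is retargeted once the datanode redirect is known.
    ByteRangeInputStream in =
        new HftpFileSystem.RangeHeaderInputStream(factory, u);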

+ 8 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestByteRangeInputStream.java

@@ -97,12 +97,13 @@ public static class MockHttpURLConnection extends HttpURLConnection {
 
   @Test
   public void testByteRange() throws IOException {
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
     HftpFileSystem.RangeHeaderUrlOpener ospy = spy(
-        new HftpFileSystem.RangeHeaderUrlOpener(new URL("http://test/")));
+        new HftpFileSystem.RangeHeaderUrlOpener(factory, new URL("http://test/")));
     doReturn(new MockHttpURLConnection(ospy.getURL())).when(ospy)
         .openConnection();
     HftpFileSystem.RangeHeaderUrlOpener rspy = spy(
-        new HftpFileSystem.RangeHeaderUrlOpener((URL) null));
+        new HftpFileSystem.RangeHeaderUrlOpener(factory, (URL) null));
     doReturn(new MockHttpURLConnection(rspy.getURL())).when(rspy)
         .openConnection();
     ByteRangeInputStream is = new HftpFileSystem.RangeHeaderInputStream(ospy, rspy);
@@ -171,12 +172,15 @@ public static class MockHttpURLConnection extends HttpURLConnection {
       assertEquals("Should fail because incorrect response code was sent",
                    "HTTP_OK expected, received 206", e.getMessage());
     }
+    is.close();
   }
 
   @Test
   public void testPropagatedClose() throws IOException {
-    ByteRangeInputStream brs = spy(
-        new HftpFileSystem.RangeHeaderInputStream(new URL("http://test/")));
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
+
+    ByteRangeInputStream brs = spy(new HftpFileSystem.RangeHeaderInputStream(
+        factory, new URL("http://test/")));
 
     InputStream mockStream = mock(InputStream.class);
     doReturn(mockStream).when(brs).openInputStream();
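
The test changes show the payoff of injection: the factory is now an ordinary Mockito mock handed to the opener, where previously the spy had to work around a hard-wired static default. A hedged fragment of the stubbing pattern, reusing this test class's MockHttpURLConnection helper:

    URLConnectionFactory factory = mock(URLConnectionFactory.class);
    URL url = new URL("http://test/");
    // Stub the factory so openConnection() yields a canned HTTP connection.
    doReturn(new MockHttpURLConnection(url)).when(factory).openConnection(url);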

+ 9 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.web;
 
 import java.io.File;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.net.URI;
 
@@ -30,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -65,9 +67,11 @@ public class TestHttpsFileSystem {
 
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
     cluster.waitActive();
-    cluster.getFileSystem().create(new Path("/test")).close();
+    OutputStream os = cluster.getFileSystem().create(new Path("/test"));
+    os.write(23);
+    os.close();
     InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
-    nnAddr = addr.getHostName() + ":" + addr.getPort();
+    nnAddr = NetUtils.getHostPortString(addr);
     conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
   }
 
@@ -82,6 +86,9 @@ public class TestHttpsFileSystem {
   public void testHsftpFileSystem() throws Exception {
     FileSystem fs = FileSystem.get(new URI("hsftp://" + nnAddr), conf);
     Assert.assertTrue(fs.exists(new Path("/test")));
+    InputStream is = fs.open(new Path("/test"));
+    Assert.assertEquals(23, is.read());
+    is.close();
     fs.close();
   }