
HDFS-5873. dfs.http.policy should have higher precedence over dfs.https.enable. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1564973 13f79535-47bb-0310-9956-ffa450edef68
Jing Zhao committed 11 years ago
commit d598b6ef9f

+ 6 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java

@@ -34,13 +34,14 @@ public class HttpConfig {
     HTTPS_ONLY,
     HTTP_AND_HTTPS;
 
+    private static final Policy[] VALUES = values();
     public static Policy fromString(String value) {
-      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
-        return HTTPS_ONLY;
-      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
-        return HTTP_AND_HTTPS;
+      for (Policy p : VALUES) {
+        if (p.name().equalsIgnoreCase(value)) {
+          return p;
+        }
       }
-      return HTTP_ONLY;
+      return null;
     }
 
     public boolean isHttpEnabled() {
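
Note (an editorial aside, not part of the commit): with this change HttpConfig.Policy.fromString no longer falls back to HTTP_ONLY for unknown strings; it returns null and leaves the decision to the caller. A minimal sketch of the new contract, assuming only the HttpConfig class shown above; the class name below is made up for illustration:

import org.apache.hadoop.http.HttpConfig;

public class PolicyLookupSketch {
  public static void main(String[] args) {
    // Known names match the enum constants case-insensitively.
    System.out.println(HttpConfig.Policy.fromString("https_only"));   // HTTPS_ONLY

    // Unknown names now yield null instead of silently becoming HTTP_ONLY,
    // so the caller (e.g. DFSUtil.getHttpPolicy below) must reject them.
    HttpConfig.Policy p = HttpConfig.Policy.fromString("bogus");
    System.out.println(p == null ? "unrecognized" : p.name());
  }
}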

+ 2 - 1
hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm

@@ -352,7 +352,8 @@ Configuration for <<<conf/core-site.xml>>>
 | | | This value is deprecated. Use dfs.http.policy |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.http.policy>>> | <HTTP_ONLY> or <HTTPS_ONLY> or <HTTP_AND_HTTPS> | |
-| | | HTTPS_ONLY turns off http access |
+| | | HTTPS_ONLY turns off http access. This option takes precedence over |
+| | | the deprecated configuration dfs.https.enable and hadoop.ssl.enabled. |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.namenode.https-address>>> | <nn_host_fqdn:50470> | |
 *-------------------------+-------------------------+------------------------+
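
Illustration (hedged, not part of the commit): the documented precedence means an explicit dfs.http.policy wins over the deprecated flags, while the deprecated dfs.https.enable alone now maps to HTTP_AND_HTTPS. A small sketch using the public Configuration and DFSUtil APIs; the class name is invented for the example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.http.HttpConfig;

public class HttpPolicyPrecedenceSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);

    // Only the deprecated flag is set: HTTPS is enabled alongside HTTP.
    conf.setBoolean("dfs.https.enable", true);
    System.out.println(DFSUtil.getHttpPolicy(conf));   // HTTP_AND_HTTPS

    // An explicit dfs.http.policy takes precedence over the deprecated flag.
    conf.set("dfs.http.policy", HttpConfig.Policy.HTTP_ONLY.name());
    System.out.println(DFSUtil.getHttpPolicy(conf));   // HTTP_ONLY
  }
}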

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -872,6 +872,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-5876. SecureDataNodeStarter does not pick up configuration in
     hdfs-site.xml. (Haohui Mai via jing9)
 
+    HDFS-5873. dfs.http.policy should have higher precedence over dfs.https.enable.
+    (Haohui Mai via jing9)
+
   BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS
 
     HDFS-4985. Add storage type to the protocol and expose it in block report

+ 20 - 17
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java

@@ -1567,31 +1567,34 @@ public class DFSUtil {
    * configuration settings.
    */
   public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
-    String httpPolicy = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
-        DFSConfigKeys.DFS_HTTP_POLICY_DEFAULT);
-
-    HttpConfig.Policy policy = HttpConfig.Policy.fromString(httpPolicy);
-
-    if (policy == HttpConfig.Policy.HTTP_ONLY) {
-      boolean httpsEnabled = conf.getBoolean(
-          DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
+    String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
+    if (policyStr == null) {
+      boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
           DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);
 
-      boolean hadoopSslEnabled = conf.getBoolean(
+      boolean hadoopSsl = conf.getBoolean(
           CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
           CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);
 
-      if (hadoopSslEnabled) {
+      if (hadoopSsl) {
         LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
-            + " is deprecated. Please use "
-            + DFSConfigKeys.DFS_HTTPS_ENABLE_KEY + ".");
-        policy = HttpConfig.Policy.HTTPS_ONLY;
-      } else if (httpsEnabled) {
+            + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
+            + ".");
+      }
+      if (https) {
         LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
-            + " is deprecated. Please use "
-            + DFSConfigKeys.DFS_HTTPS_ENABLE_KEY + ".");
-        policy = HttpConfig.Policy.HTTP_AND_HTTPS;
+            + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
+            + ".");
       }
+
+      return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
+          : HttpConfig.Policy.HTTP_ONLY;
+    }
+
+    HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
+    if (policy == null) {
+      throw new HadoopIllegalArgumentException("Unregonized value '"
+          + policyStr + "' for " + DFSConfigKeys.DFS_HTTP_POLICY_KEY);
     }
 
     conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
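
Also worth noting (an aside, not part of the diff): a value that does not name any policy now fails fast with HadoopIllegalArgumentException instead of being silently treated as HTTP_ONLY, which is what TestHttpPolicy below exercises. A hedged sketch of the failure mode; the class name and the misspelled value are made up:

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;

public class InvalidPolicySketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("dfs.http.policy", "HTPS_ONLY");   // a plausible typo

    try {
      DFSUtil.getHttpPolicy(conf);
    } catch (HadoopIllegalArgumentException e) {
      // Previously the typo silently resolved to HTTP_ONLY; now the
      // misconfiguration is surfaced at startup.
      System.out.println("Rejected: " + e.getMessage());
    }
  }
}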

+ 54 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHttpPolicy.java

@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_AND_HTTPS;
+import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_ONLY;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
+import org.junit.Test;
+
+public final class TestHttpPolicy {
+
+  @Test(expected = HadoopIllegalArgumentException.class)
+  public void testInvalidPolicyValue() {
+    Configuration conf = new Configuration();
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "invalid");
+    DFSUtil.getHttpPolicy(conf);
+  }
+
+  @Test
+  public void testDeprecatedConfiguration() {
+    Configuration conf = new Configuration(false);
+    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
+
+    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
+    Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));
+
+    conf = new Configuration(false);
+    conf.setBoolean(DFSConfigKeys.HADOOP_SSL_ENABLED_KEY, true);
+    Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));
+
+    conf = new Configuration(false);
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HTTP_ONLY.name());
+    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
+    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
+  }
+}

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java

@@ -104,7 +104,9 @@ public class TestNameNodeHttpServer {
           server.getHttpsAddress() == null));
 
     } finally {
-      server.stop();
+      if (server != null) {
+        server.stop();
+      }
     }
   }