
HADOOP-10348. Deprecate hadoop.ssl.configuration in branch-2, and remove it in trunk. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1570295 13f79535-47bb-0310-9956-ffa450edef68
Jing Zhao 11 years ago
parent
commit
8b196816d8

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -340,6 +340,9 @@ Release 2.5.0 - UNRELEASED
 
   IMPROVEMENTS
 
+    HADOOP-10348. Deprecate hadoop.ssl.configuration in branch-2, and remove
+    it in trunk. (Haohui Mai via jing9)
+
   OPTIMIZATIONS
 
   BUG FIXES 

+ 0 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

@@ -268,21 +268,11 @@ public class CommonConfigurationKeysPublic {
   public static final String  HADOOP_SECURITY_AUTH_TO_LOCAL =
     "hadoop.security.auth_to_local";
 
-  public static final String HADOOP_SSL_ENABLED_KEY = "hadoop.ssl.enabled";
-  public static final boolean HADOOP_SSL_ENABLED_DEFAULT = false;
-
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN =
           "hadoop.kerberos.min.seconds.before.relogin";
   /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
   public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
           60;
-
-  // HTTP policies to be used in configuration
-  // Use HttpPolicy.name() instead
-  @Deprecated
-  public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY";
-  @Deprecated
-  public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY";
 }
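
Callers that referenced the removed HTTP_POLICY_HTTP_ONLY / HTTP_POLICY_HTTPS_ONLY strings can switch to the HttpConfig.Policy enum, as the deleted comment already advised. A minimal sketch of that replacement (the class name PolicyNameExample is illustrative, not part of this patch):

import org.apache.hadoop.http.HttpConfig;

public class PolicyNameExample {
  public static void main(String[] args) {
    // name() yields the same strings the deprecated constants used to hold.
    String httpOnly = HttpConfig.Policy.HTTP_ONLY.name();    // "HTTP_ONLY"
    String httpsOnly = HttpConfig.Policy.HTTPS_ONLY.name();  // "HTTPS_ONLY"
    System.out.println(httpOnly + " / " + httpsOnly);
  }
}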
 

+ 0 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java

@@ -377,8 +377,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final int     DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT = 10;
   public static final String  DFS_SUPPORT_APPEND_KEY = "dfs.support.append";
   public static final boolean DFS_SUPPORT_APPEND_DEFAULT = true;
-  public static final String  DFS_HTTPS_ENABLE_KEY = "dfs.https.enable";
-  public static final boolean DFS_HTTPS_ENABLE_DEFAULT = false;
   public static final String  DFS_HTTP_POLICY_KEY = "dfs.http.policy";
   public static final String  DFS_HTTP_POLICY_DEFAULT =  HttpConfig.Policy.HTTP_ONLY.name();
   public static final String  DFS_DEFAULT_CHUNK_VIEW_SIZE_KEY = "dfs.default.chunk.view.size";
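
With dfs.https.enable gone, deployments that set it should state the policy explicitly through dfs.http.policy. A minimal migration sketch, assuming the old boolean was true (which the removed DFSUtil fallback mapped to HTTP_AND_HTTPS); the class name HttpsMigrationExample is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.http.HttpConfig;

public class HttpsMigrationExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    // Before: conf.setBoolean("dfs.https.enable", true);
    // After: name the desired policy directly.
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
        HttpConfig.Policy.HTTP_AND_HTTPS.name());
    System.out.println(conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY));  // HTTP_AND_HTTPS
  }
}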

+ 3 - 36
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java

@@ -1553,44 +1553,11 @@ public class DFSUtil {
   }
 
   /**
-   * Get http policy. Http Policy is chosen as follows:
-   * <ol>
-   * <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
-   * https endpoints are started on configured https ports</li>
-   * <li>This configuration is overridden by dfs.https.enable configuration, if
-   * it is set to true. In that case, both http and https endpoints are stared.</li>
-   * <li>All the above configurations are overridden by dfs.http.policy
-   * configuration. With this configuration you can set http-only, https-only
-   * and http-and-https endpoints.</li>
-   * </ol>
-   * See hdfs-default.xml documentation for more details on each of the above
-   * configuration settings.
+   * Get http policy.
    */
   public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
-    String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
-    if (policyStr == null) {
-      boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
-          DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);
-
-      boolean hadoopSsl = conf.getBoolean(
-          CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
-          CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);
-
-      if (hadoopSsl) {
-        LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
-            + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
-            + ".");
-      }
-      if (https) {
-        LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
-            + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
-            + ".");
-      }
-
-      return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
-          : HttpConfig.Policy.HTTP_ONLY;
-    }
-
+    String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
+        DFSConfigKeys.DFS_HTTP_POLICY_DEFAULT);
     HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
     if (policy == null) {
      throw new HadoopIllegalArgumentException("Unrecognized value '"
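
After this change the policy comes from dfs.http.policy alone: an unset key falls back to DFS_HTTP_POLICY_DEFAULT (HTTP_ONLY) and an unrecognized value raises HadoopIllegalArgumentException. A minimal sketch of both paths (the class name GetHttpPolicyExample is illustrative, not part of this patch):

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.http.HttpConfig;

public class GetHttpPolicyExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    // No key set: the configured default, HTTP_ONLY, is returned.
    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
    System.out.println(policy);  // HTTP_ONLY

    // An unrecognized value is rejected rather than silently ignored.
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "invalid");
    try {
      DFSUtil.getHttpPolicy(conf);
    } catch (HadoopIllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}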

+ 0 - 22
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHttpPolicy.java

@@ -17,12 +17,8 @@
  */
 package org.apache.hadoop.hdfs;
 
-import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_AND_HTTPS;
-import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_ONLY;
-
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
 import org.junit.Test;
 
 public final class TestHttpPolicy {
@@ -33,22 +29,4 @@ public final class TestHttpPolicy {
     conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "invalid");
     DFSUtil.getHttpPolicy(conf);
   }
-
-  @Test
-  public void testDeprecatedConfiguration() {
-    Configuration conf = new Configuration(false);
-    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
-
-    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
-    Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));
-
-    conf = new Configuration(false);
-    conf.setBoolean(DFSConfigKeys.HADOOP_SSL_ENABLED_KEY, true);
-    Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));
-
-    conf = new Configuration(false);
-    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HTTP_ONLY.name());
-    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
-    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
-  }
 }
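
The deleted testDeprecatedConfiguration leaves only the invalid-value case under test. A hypothetical companion test, not part of this patch, asserting the new default resolution (TestDefaultHttpPolicy is an assumed name):

package org.apache.hadoop.hdfs;

import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_ONLY;

import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Test;

public final class TestDefaultHttpPolicy {
  @Test
  public void testDefaultPolicy() {
    // With no dfs.http.policy set, the simplified getHttpPolicy falls back
    // to DFS_HTTP_POLICY_DEFAULT, i.e. HTTP_ONLY.
    Configuration conf = new Configuration(false);
    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
  }
}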