
HDFS-12114. Consistent HttpFS property names. Contributed by John Zhuge.

John Zhuge 7 years ago
parent
commit
ac0a04a6e1

+ 7 - 6
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java

@@ -46,17 +46,16 @@ public class HttpFSServerWebServer {
  private static final String HTTPFS_SITE_XML = "httpfs-site.xml";

  // HTTP properties
-  static final String HTTP_PORT_KEY = "hadoop.httpfs.http.port";
+  static final String HTTP_PORT_KEY = "httpfs.http.port";
  private static final int HTTP_PORT_DEFAULT = 14000;
-  static final String HTTP_HOST_KEY = "hadoop.httpfs.http.host";
-  private static final String HTTP_HOST_DEFAULT = "0.0.0.0";
+  static final String HTTP_HOSTNAME_KEY = "httpfs.http.hostname";
+  private static final String HTTP_HOSTNAME_DEFAULT = "0.0.0.0";

  // SSL properties
  static final String SSL_ENABLED_KEY = "httpfs.ssl.enabled";
  private static final boolean SSL_ENABLED_DEFAULT = false;

-  private static final String HTTP_ADMINS_KEY =
-      "hadoop.httpfs.http.administrators";
+  private static final String HTTP_ADMINS_KEY = "httpfs.http.administrators";

  private static final String NAME = "webhdfs";
  private static final String SERVLET_PATH = "/webhdfs";
@@ -74,6 +73,8 @@ public class HttpFSServerWebServer {
    // Override configuration with deprecated environment variables.
    deprecateEnv("HTTPFS_TEMP", conf, HttpServer2.HTTP_TEMP_DIR_KEY,
        HTTPFS_SITE_XML);
+    deprecateEnv("HTTPFS_HTTP_HOSTNAME", conf, HTTP_HOSTNAME_KEY,
+        HTTPFS_SITE_XML);
    deprecateEnv("HTTPFS_HTTP_PORT", conf, HTTP_PORT_KEY,
        HTTPFS_SITE_XML);
    deprecateEnv("HTTPFS_MAX_THREADS", conf,
@@ -95,7 +96,7 @@ public class HttpFSServerWebServer {
        SSL_ENABLED_DEFAULT);
    scheme = sslEnabled ? HttpServer2.HTTPS_SCHEME : HttpServer2.HTTP_SCHEME;

-    String host = conf.get(HTTP_HOST_KEY, HTTP_HOST_DEFAULT);
+    String host = conf.get(HTTP_HOSTNAME_KEY, HTTP_HOSTNAME_DEFAULT);
    int port = conf.getInt(HTTP_PORT_KEY, HTTP_PORT_DEFAULT);
    URI endpoint = new URI(scheme, null, host, port, null, null, null);


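Every deprecated `HTTPFS_*` variable above flows through the same `deprecateEnv` helper, and this patch adds `HTTPFS_HTTP_HOSTNAME` to that list. The helper's body is not part of this diff; the following is a minimal sketch of the behavior implied by its call sites (the wrapper class, logger, and warning text are assumptions for illustration, not the actual Hadoop code):

```java
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical standalone wrapper, for illustration only.
public class DeprecateEnvSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(DeprecateEnvSketch.class);

  // Signature taken from the call sites in the hunk above.
  static void deprecateEnv(String varName, Configuration conf,
      String propName, String confFile) {
    String value = System.getenv(varName);
    if (value == null) {
      return; // variable not set; keep the value from the config file
    }
    // Assumed behavior: warn, then let the deprecated variable override
    // the configuration property it maps to.
    LOG.warn("Environment variable {} is deprecated; set {} in {} instead.",
        varName, propName, confFile);
    conf.set(propName, value);
  }
}
```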
+ 0 - 17
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh

@@ -30,17 +30,6 @@ function hdfs_subcommand_httpfs
    . "${HADOOP_CONF_DIR}/httpfs-env.sh"
  fi

-  hadoop_deprecate_envvar HTTPFS_CONFIG HADOOP_CONF_DIR
-  hadoop_deprecate_envvar HTTPFS_LOG HADOOP_LOG_DIR
-
-  hadoop_using_envvar HTTPFS_HTTP_HOSTNAME
-  hadoop_using_envvar HTTPFS_HTTP_PORT
-  hadoop_using_envvar HTTPFS_MAX_HTTP_HEADER_SIZE
-  hadoop_using_envvar HTTPFS_MAX_THREADS
-  hadoop_using_envvar HTTPFS_SSL_ENABLED
-  hadoop_using_envvar HTTPFS_SSL_KEYSTORE_FILE
-  hadoop_using_envvar HTTPFS_TEMP
-
  # shellcheck disable=SC2034
  HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true
  # shellcheck disable=SC2034
@@ -53,12 +42,6 @@ function hdfs_subcommand_httpfs
    "-Dhttpfs.config.dir=${HTTPFS_CONFIG:-${HADOOP_CONF_DIR}}"
  hadoop_add_param HADOOP_OPTS "-Dhttpfs.log.dir" \
    "-Dhttpfs.log.dir=${HTTPFS_LOG:-${HADOOP_LOG_DIR}}"
-  hadoop_add_param HADOOP_OPTS "-Dhttpfs.http.hostname" \
-    "-Dhttpfs.http.hostname=${HTTPFS_HOST_NAME:-$(hostname -f)}"
-  if [[ -n "${HTTPFS_SSL_ENABLED}" ]]; then
-    hadoop_add_param HADOOP_OPTS "-Dhttpfs.ssl.enabled" \
-      "-Dhttpfs.ssl.enabled=${HTTPFS_SSL_ENABLED}"
-  fi

  if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] ||
     [[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml

@@ -16,7 +16,7 @@
<configuration>

  <property>
-    <name>hadoop.httpfs.http.port</name>
+    <name>httpfs.http.port</name>
    <value>14000</value>
    <description>
      The HTTP port for HttpFS REST API.
@@ -24,7 +24,7 @@
  </property>

  <property>
-    <name>hadoop.httpfs.http.host</name>
+    <name>httpfs.http.hostname</name>
    <value>0.0.0.0</value>
    <description>
      The bind host for HttpFS REST API.
@@ -32,7 +32,7 @@
  </property>

  <property>
-    <name>hadoop.httpfs.http.administrators</name>
+    <name>httpfs.http.administrators</name>
    <value></value>
    <description>ACL for the admins, this configuration is used to control
      who can access the default servlets for HttpFS server. The value
@@ -46,7 +46,7 @@
  </property>

  <property>
-    <name>hadoop.httpfs.ssl.enabled</name>
+    <name>httpfs.ssl.enabled</name>
    <value>false</value>
    <description>
      Whether SSL is enabled. Default is false, i.e. disabled.

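After the rename, the defaults in `httpfs-default.xml` line up with the constants in `HttpFSServerWebServer`. A short sketch of how the renamed keys resolve through Hadoop's `Configuration` (this standalone class is illustrative and not part of the patch; the key names and default values come from the hunks above):

```java
import java.net.URI;
import org.apache.hadoop.conf.Configuration;

// Illustrative only: resolves the renamed HttpFS keys the same way
// HttpFSServerWebServer does in the first file of this diff.
public class HttpFSEndpointSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.addResource("httpfs-default.xml");
    conf.addResource("httpfs-site.xml"); // site settings override defaults

    String host = conf.get("httpfs.http.hostname", "0.0.0.0");
    int port = conf.getInt("httpfs.http.port", 14000);
    boolean sslEnabled = conf.getBoolean("httpfs.ssl.enabled", false);

    String scheme = sslEnabled ? "https" : "http";
    URI endpoint = new URI(scheme, null, host, port, null, null, null);
    System.out.println("HttpFS endpoint: " + endpoint);
  }
}
```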
+ 6 - 5
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm

@@ -82,7 +82,7 @@ Enable SSL in `etc/hadoop/httpfs-site.xml`:

```xml
  <property>
-    <name>hadoop.httpfs.ssl.enabled</name>
+    <name>httpfs.ssl.enabled</name>
    <value>true</value>
    <description>
      Whether SSL is enabled. Default is false, i.e. disabled.
@@ -142,13 +142,14 @@ configuration properties instead.

 Environment Variable        | Configuration Property       | Configuration File
 ----------------------------|------------------------------|--------------------
-HTTPFS_TEMP                 | hadoop.http.temp.dir         | httpfs-site.xml
-HTTPFS_HTTP_PORT            | hadoop.httpfs.http.port      | httpfs-site.xml
+HTTPFS_HTTP_HOSTNAME        | httpfs.http.hostname         | httpfs-site.xml
+HTTPFS_HTTP_PORT            | httpfs.http.port             | httpfs-site.xml
 HTTPFS_MAX_HTTP_HEADER_SIZE | hadoop.http.max.request.header.size and hadoop.http.max.response.header.size | httpfs-site.xml
 HTTPFS_MAX_THREADS          | hadoop.http.max.threads      | httpfs-site.xml
-HTTPFS_SSL_ENABLED          | hadoop.httpfs.ssl.enabled    | httpfs-site.xml
+HTTPFS_SSL_ENABLED          | httpfs.ssl.enabled           | httpfs-site.xml
 HTTPFS_SSL_KEYSTORE_FILE    | ssl.server.keystore.location | ssl-server.xml
 HTTPFS_SSL_KEYSTORE_PASS    | ssl.server.keystore.password | ssl-server.xml
+HTTPFS_TEMP                 | hadoop.http.temp.dir         | httpfs-site.xml

HTTP Default Services
---------------------
@@ -182,7 +183,7 @@ and `/stacks`, configure the following properties in `httpfs-site.xml`:
  </property>

  <property>
-    <name>hadoop.httpfs.http.administrators</name>
+    <name>httpfs.http.administrators</name>
    <value></value>
    <description>ACL for the admins, this configuration is used to control
      who can access the default servlets for HttpFS server. The value

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java

@@ -63,7 +63,7 @@ public class TestHttpFSServerWebServer {
  @Before
  public void setUp() throws Exception {
    Configuration conf = new Configuration();
-    conf.set(HttpFSServerWebServer.HTTP_HOST_KEY, "localhost");
+    conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
    conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
    Configuration sslConf = new Configuration();
    webServer = new HttpFSServerWebServer(conf, sslConf);