HADOOP-18249. Replace deprecated getUri() in HttpRequest. (#4335)

* HADOOP-18249. Replace deprecated getUri() in HttpRequest.
WebHdfsHandler.java: replace req.getUri() with uri() and req.getMethod() with method().
HostRestrictingAuthorizationFilterHandler.java: replace req.getUri() with uri().
TestHostRestrictingAuthorizationFilterHandler.java: remove unneeded throws Exception clauses; replace channelResponse.getStatus() with status().

* HADOOP-18249. Replace deprecated getUri() in HttpRequest.

* HADOOP-18249. Fix some checkstyle warnings.

Co-authored-by: slfan1989 <louj1988@@>
slfan1989 3 years ago
parent
commit
f469b0e143
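
For context: Netty 4.1 renamed the HttpRequest/HttpResponse accessors and deprecated the get-prefixed forms, so getUri(), getMethod(), and getStatus() become uri(), method(), and status(). A standalone sketch of the mapping (not part of the patch):

import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.DefaultHttpResponse;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

public class NettyAccessorMigration {
  public static void main(String[] args) {
    HttpRequest req = new DefaultFullHttpRequest(
        HTTP_1_1, HttpMethod.GET, "/webhdfs/v1/?op=LISTSTATUS");
    String uri = req.uri();                    // was req.getUri()
    HttpMethod method = req.method();          // was req.getMethod()

    HttpResponse resp = new DefaultHttpResponse(HTTP_1_1, HttpResponseStatus.OK);
    HttpResponseStatus status = resp.status(); // was resp.getStatus()

    System.out.println(method + " " + uri + " -> " + status.code());
  }
}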

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java

@@ -197,7 +197,7 @@ final class HostRestrictingAuthorizationFilterHandler
     @Override
     public String getQueryString() {
       try {
-        return (new URI(req.getUri()).getQuery());
+        return (new URI(req.uri()).getQuery());
       } catch (URISyntaxException e) {
         return null;
       }
@@ -205,7 +205,7 @@ final class HostRestrictingAuthorizationFilterHandler
 
     @Override
     public String getRequestURI() {
-      String uri = req.getUri();
+      String uri = req.uri();
       // Netty's getUri includes the query string, while Servlet's does not
       return (uri.substring(0, uri.indexOf("?") >= 0 ? uri.indexOf("?") :
           uri.length()));

+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java

@@ -98,7 +98,7 @@ class SimpleHttpProxyHandler extends SimpleChannelInboundHandler<HttpRequest> {
   @Override
   public void channelRead0
     (final ChannelHandlerContext ctx, final HttpRequest req) {
-    uri = req.getUri();
+    uri = req.uri();
     final Channel client = ctx.channel();
     Bootstrap proxiedServer = new Bootstrap()
       .group(client.eventLoop())
@@ -117,8 +117,7 @@ class SimpleHttpProxyHandler extends SimpleChannelInboundHandler<HttpRequest> {
       public void operationComplete(ChannelFuture future) throws Exception {
         if (future.isSuccess()) {
           ctx.channel().pipeline().remove(HttpResponseEncoder.class);
-          HttpRequest newReq = new DefaultFullHttpRequest(HTTP_1_1,
-            req.getMethod(), req.getUri());
+          HttpRequest newReq = new DefaultFullHttpRequest(HTTP_1_1, req.method(), req.uri());
           newReq.headers().add(req.headers());
           newReq.headers().set(CONNECTION, Values.CLOSE);
           future.channel().writeAndFlush(newReq);
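
The proxy simply clones the inbound request with the new accessors before forwarding it. The same pattern in isolation, using the non-deprecated 4.1 header constants rather than the patch's CONNECTION/Values.CLOSE (a sketch, not the handler's exact code):

import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpRequest;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

public class ProxyRequestCloneSketch {
  // Copy method, URI, and headers from the original request, then
  // force the proxied connection to close after the response.
  static HttpRequest cloneForProxy(HttpRequest req) {
    HttpRequest newReq =
        new DefaultFullHttpRequest(HTTP_1_1, req.method(), req.uri());
    newReq.headers().add(req.headers());
    newReq.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
    return newReq;
  }
}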

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/URLDispatcher.java

@@ -43,7 +43,7 @@ class URLDispatcher extends SimpleChannelInboundHandler<HttpRequest> {
   @Override
   protected void channelRead0(ChannelHandlerContext ctx, HttpRequest req)
       throws Exception {
-    String uri = req.getUri();
+    String uri = req.uri();
     ChannelPipeline p = ctx.pipeline();
     if (uri.startsWith(WEBHDFS_PREFIX)) {
       WebHdfsHandler h = new WebHdfsHandler(conf, confForCreate);
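
Dispatch is by URI prefix, and uri() still includes the query string, which does not affect a startsWith check. A tiny sketch (the WEBHDFS_PREFIX value "/webhdfs/v1" is assumed here):

public class PrefixDispatchSketch {
  static final String WEBHDFS_PREFIX = "/webhdfs/v1"; // assumed value

  public static void main(String[] args) {
    String uri = "/webhdfs/v1/tmp/f?op=OPEN"; // what req.uri() would return
    System.out.println(uri.startsWith(WEBHDFS_PREFIX)); // true -> WebHdfsHandler
  }
}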

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java

@@ -122,8 +122,8 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
   @Override
   public void channelRead0(final ChannelHandlerContext ctx,
                            final HttpRequest req) throws Exception {
-    Preconditions.checkArgument(req.getUri().startsWith(WEBHDFS_PREFIX));
-    QueryStringDecoder queryString = new QueryStringDecoder(req.getUri());
+    Preconditions.checkArgument(req.uri().startsWith(WEBHDFS_PREFIX));
+    QueryStringDecoder queryString = new QueryStringDecoder(req.uri());
     params = new ParameterParser(queryString, conf);
     DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
     ugi = ugiProvider.ugi();
@@ -144,7 +144,7 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
             LOG.warn("Error retrieving hostname: ", e);
             host = "unknown";
           }
-          REQLOG.info(host + " " + req.getMethod() + " "  + req.getUri() + " " +
+          REQLOG.info(host + " " + req.method() + " "  + req.uri() + " " +
               getResponseCode());
         }
         return null;
@@ -154,13 +154,13 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
 
   int getResponseCode() {
     return (resp == null) ? INTERNAL_SERVER_ERROR.code() :
-        resp.getStatus().code();
+        resp.status().code();
   }
 
   public void handle(ChannelHandlerContext ctx, HttpRequest req)
     throws IOException, URISyntaxException {
     String op = params.op();
-    HttpMethod method = req.getMethod();
+    HttpMethod method = req.method();
     if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op)
       && method == PUT) {
       onCreate(ctx);
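
WebHdfsHandler decodes the op from the query string and dispatches on the HTTP method. A hedged standalone sketch of that decode-then-dispatch step (values hypothetical):

import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.QueryStringDecoder;

public class WebHdfsDispatchSketch {
  public static void main(String[] args) {
    // uri() returns path plus query string; QueryStringDecoder splits them.
    QueryStringDecoder d =
        new QueryStringDecoder("/webhdfs/v1/tmp/f?op=CREATE&overwrite=true");
    String op = d.parameters().get("op").get(0);  // "CREATE"
    System.out.println(d.path() + " op=" + op);

    // Dispatch as the handler does: match the op name and the HTTP method.
    HttpMethod method = HttpMethod.PUT;           // stand-in for req.method()
    if ("CREATE".equalsIgnoreCase(op) && method == HttpMethod.PUT) {
      System.out.println("would call onCreate(ctx)");
    }
  }
}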

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java

@@ -76,7 +76,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
   @Override
   public void channelRead0(ChannelHandlerContext ctx, HttpRequest request)
       throws Exception {
-    if (request.getMethod() != HttpMethod.GET) {
+    if (request.method() != HttpMethod.GET) {
       DefaultHttpResponse resp = new DefaultHttpResponse(HTTP_1_1,
           METHOD_NOT_ALLOWED);
       resp.headers().set(CONNECTION, CLOSE);
@@ -84,7 +84,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
       return;
     }
 
-    QueryStringDecoder decoder = new QueryStringDecoder(request.getUri());
+    QueryStringDecoder decoder = new QueryStringDecoder(request.uri());
     // check path. throw exception if path doesn't start with WEBHDFS_PREFIX
     String path = getPath(decoder);
     final String op = getOp(decoder);
@@ -140,7 +140,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
   public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
           throws Exception {
     Exception e = cause instanceof Exception ? (Exception) cause : new
-      Exception(cause);
+        Exception(cause);
     final String output = JsonUtil.toJsonString(e);
     ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(Charsets.UTF_8));
     final DefaultFullHttpResponse resp = new DefaultFullHttpResponse(
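
FSImageHandler applies the same accessors on the read path: reject anything but GET, then decode the query string. A minimal sketch of the method check (standalone, not the handler's exact code):

import io.netty.handler.codec.http.DefaultHttpResponse;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import static io.netty.handler.codec.http.HttpResponseStatus.METHOD_NOT_ALLOWED;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

public class MethodCheckSketch {
  // Returns a 405 response for non-GET requests, or null to continue;
  // reading the code back goes through status(), not getStatus().
  static HttpResponse checkMethod(HttpRequest req) {
    if (req.method() != HttpMethod.GET) {
      HttpResponse resp = new DefaultHttpResponse(HTTP_1_1, METHOD_NOT_ALLOWED);
      System.out.println("rejecting with " + resp.status().code()); // 405
      return resp;
    }
    return null;
  }
}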

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestHostRestrictingAuthorizationFilterHandler.java

@@ -48,7 +48,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
    * Test running in with no ACL rules (restrict all)
    */
   @Test
-  public void testRejectAll() throws Exception {
+  public void testRejectAll() {
     EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
         new HostRestrictingAuthorizationFilterHandler());
     FullHttpRequest httpRequest =
@@ -61,7 +61,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
     DefaultHttpResponse channelResponse =
         (DefaultHttpResponse) channel.outboundMessages().poll();
     assertNotNull("Expected response to exist.", channelResponse);
-    assertEquals(HttpResponseStatus.FORBIDDEN, channelResponse.getStatus());
+    assertEquals(HttpResponseStatus.FORBIDDEN, channelResponse.status());
     assertFalse(channel.isOpen());
   }
 
@@ -70,7 +70,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
    * reused
    */
   @Test
-  public void testMultipleAcceptedGETsOneChannel() throws Exception {
+  public void testMultipleAcceptedGETsOneChannel() {
     Configuration conf = new Configuration();
     conf.set(CONFNAME, "*,*,/allowed");
     HostRestrictingAuthorizationFilter filter =
@@ -102,7 +102,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
    * single filter instance
    */
   @Test
-  public void testMultipleChannels() throws Exception {
+  public void testMultipleChannels() {
     Configuration conf = new Configuration();
     conf.set(CONFNAME, "*,*,/allowed");
     HostRestrictingAuthorizationFilter filter =
@@ -140,7 +140,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
    * Test accepting a GET request for the file checksum
    */
   @Test
-  public void testAcceptGETFILECHECKSUM() throws Exception {
+  public void testAcceptGETFILECHECKSUM() {
     EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
         new HostRestrictingAuthorizationFilterHandler());
     FullHttpRequest httpRequest =
@@ -158,7 +158,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
    */
   protected static class CustomEmbeddedChannel extends EmbeddedChannel {
 
-    private InetSocketAddress socketAddress;
+    private final InetSocketAddress socketAddress;
 
     /*
      * A normal @{EmbeddedChannel} constructor which takes the remote client