瀏覽代碼

HADOOP-18981. Move oncrpc and portmap packages to hadoop-common (#6280)

Move the org.apache.hadoop.{oncrpc, portmap} packages from the hadoop-nfs module
to the hadoop-common module.

This allows for use of the protocol beyond just NFS — including within HDFS itself.

Contributed by Xing Lin
Xing Lin 1 年之前
父節點
當前提交
453e264eb4
共有 46 個文件被更改,包括 228 次插入和 146 次刪除
  1. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
  2. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java
  3. 8 8
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
  4. 18 18
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcCallCache.java
  5. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java
  6. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcInfo.java
  7. 5 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java
  8. 13 13
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
  9. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java
  10. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcResponse.java
  11. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
  12. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClient.java
  13. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
  14. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
  15. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java
  16. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
  17. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/XDR.java
  18. 28 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/package-info.java
  19. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
  20. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsGSS.java
  21. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsNone.java
  22. 5 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
  23. 10 10
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/RpcAuthInfo.java
  24. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
  25. 5 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/SysSecurityHandler.java
  26. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
  27. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/VerifierGSS.java
  28. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/VerifierNone.java
  29. 27 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/package-info.java
  30. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/Portmap.java
  31. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/PortmapMapping.java
  32. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java
  33. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java
  34. 0 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
  35. 27 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/package-info.java
  36. 14 14
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
  37. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java
  38. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcCall.java
  39. 14 14
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcCallCache.java
  40. 2 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java
  41. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java
  42. 2 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java
  43. 0 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestXDR.java
  44. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/security/TestCredentialsSys.java
  45. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/security/TestRpcAuthInfo.java
  46. 0 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/portmap/TestPortmap.java

+ 1 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java

@@ -86,7 +86,7 @@ public class RegistrationClient extends SimpleTcpClient {
     }
 
     private void handle(RpcDeniedReply deniedReply) {
-      LOG.warn("Portmap mapping registration request was denied , " + 
+      LOG.warn("Portmap mapping registration request was denied , " +
           deniedReply);
     }
 

+ 6 - 6
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.oncrpc;
 
 import org.apache.hadoop.oncrpc.security.Verifier;
 
-/** 
+/**
  * Represents RPC message MSG_ACCEPTED reply body. See RFC 1831 for details.
  * This response is sent to a request to indicate success of the request.
  */
@@ -32,7 +32,7 @@ public class RpcAcceptedReply extends RpcReply {
     PROC_UNAVAIL, /* program can't support procedure */
     GARBAGE_ARGS, /* procedure can't decode params */
     SYSTEM_ERR; /* e.g. memory allocation failure */
-    
+
     public static AcceptState fromValue(int value) {
       return values()[value];
     }
@@ -41,12 +41,12 @@ public class RpcAcceptedReply extends RpcReply {
       return ordinal();
     }
   };
-  
-  public static RpcAcceptedReply getAcceptInstance(int xid, 
+
+  public static RpcAcceptedReply getAcceptInstance(int xid,
       Verifier verifier) {
     return getInstance(xid, AcceptState.SUCCESS, verifier);
   }
-  
+
   public static RpcAcceptedReply getInstance(int xid, AcceptState state,
       Verifier verifier) {
     return new RpcAcceptedReply(xid, ReplyState.MSG_ACCEPTED, verifier,
@@ -70,7 +70,7 @@ public class RpcAcceptedReply extends RpcReply {
   public AcceptState getAcceptState() {
     return acceptState;
   }
-  
+
   @Override
   public XDR write(XDR xdr) {
     xdr.writeInt(xid);

+ 8 - 8
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java

@@ -31,17 +31,17 @@ public class RpcCall extends RpcMessage {
 
   public static RpcCall read(XDR xdr) {
     return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
-        xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(), 
+        xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(),
         Credentials.readFlavorAndCredentials(xdr),
         Verifier.readFlavorAndVerifier(xdr));
   }
-  
+
   public static RpcCall getInstance(int xid, int program, int version,
       int procedure, Credentials cred, Verifier verifier) {
     return new RpcCall(xid, RpcMessage.Type.RPC_CALL, 2, program, version,
         procedure, cred, verifier);
   }
-  
+
   private final int rpcVersion;
   private final int program;
   private final int version;
@@ -64,14 +64,14 @@ public class RpcCall extends RpcMessage {
     }
     validate();
   }
-  
+
   private void validateRpcVersion() {
     if (rpcVersion != RPC_VERSION) {
       throw new IllegalArgumentException("RPC version is expected to be "
           + RPC_VERSION + " but got " + rpcVersion);
     }
   }
-  
+
   public void validate() {
     validateMessageType(RpcMessage.Type.RPC_CALL);
     validateRpcVersion();
@@ -95,7 +95,7 @@ public class RpcCall extends RpcMessage {
   public int getProcedure() {
     return procedure;
   }
-  
+
   public Credentials getCredential() {
     return credentials;
   }
@@ -103,7 +103,7 @@ public class RpcCall extends RpcMessage {
   public Verifier getVerifier() {
     return verifier;
   }
-  
+
   @Override
   public XDR write(XDR xdr) {
     xdr.writeInt(xid);
@@ -116,7 +116,7 @@ public class RpcCall extends RpcMessage {
     Verifier.writeFlavorAndVerifier(verifier, xdr);
     return xdr;
   }
-  
+
   @Override
   public String toString() {
     return String.format("Xid:%d, messageType:%s, rpcVersion:%d, program:%d,"

+ 18 - 18
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCallCache.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcCallCache.java

@@ -39,34 +39,34 @@ import org.apache.hadoop.classification.VisibleForTesting;
  * <br>
  * A request is identified by the client ID (address of the client) and
  * transaction ID (xid) from the Rpc call.
- * 
+ *
  */
 public class RpcCallCache {
-  
+
   public static class CacheEntry {
     private RpcResponse response; // null if no response has been sent
-    
+
     public CacheEntry() {
       response = null;
     }
-      
+
     public boolean isInProgress() {
       return response == null;
     }
-    
+
     public boolean isCompleted() {
       return response != null;
     }
-    
+
     public RpcResponse getResponse() {
       return response;
     }
-    
+
     public void setResponse(RpcResponse response) {
       this.response = response;
     }
   }
-  
+
   /**
    * Call that is used to track a client in the {@link RpcCallCache}
    */
@@ -85,9 +85,9 @@ public class RpcCallCache {
 
     @Override
     public int hashCode() {
-	  return xid + clientId.hashCode() * 31;
+      return xid + clientId.hashCode() * 31;
     }
-    
+
     @Override
     public boolean equals(Object obj) {
       if (this == obj) {
@@ -100,11 +100,11 @@ public class RpcCallCache {
       return clientId.equals(other.clientId) && (xid == other.xid);
     }
   }
-  
+
   private final String program;
-  
+
   private final Map<ClientRequest, CacheEntry> map;
-  
+
   public RpcCallCache(final String program, final int maxEntries) {
     if (maxEntries <= 0) {
       throw new IllegalArgumentException("Cache size is " + maxEntries
@@ -121,7 +121,7 @@ public class RpcCallCache {
       }
     };
   }
-  
+
   /**
    * Return the program name.
    * @return RPC program name
@@ -144,7 +144,7 @@ public class RpcCallCache {
     }
     e.response = response;
   }
-  
+
   /**
    * Check the cache for an entry. If it does not exist, add the request
    * as in progress.
@@ -164,7 +164,7 @@ public class RpcCallCache {
     }
     return e;
   }
-  
+
   /**
    * Return number of cached entries.
    * @return cache size
@@ -172,8 +172,8 @@ public class RpcCallCache {
   public int size() {
     return map.size();
   }
-  
-  /** 
+
+  /**
    * Iterator to the cache entries.
    * @return iterator cache iterator
    */

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.oncrpc;
 
 import org.apache.hadoop.oncrpc.security.Verifier;
 
-/** 
+/**
  * Represents RPC message MSG_DENIED reply body. See RFC 1831 for details.
  * This response is sent to a request to indicate failure of the request.
  */
@@ -55,7 +55,7 @@ public class RpcDeniedReply extends RpcReply {
   public RejectState getRejectState() {
     return rejectState;
   }
-  
+
   @Override
   public String toString() {
     return new StringBuffer().append("xid:").append(xid)
@@ -63,7 +63,7 @@ public class RpcDeniedReply extends RpcReply {
         .append(verifier.getFlavor()).append("rejectState:")
         .append(rejectState).toString();
   }
-  
+
   @Override
   public XDR write(XDR xdr) {
     xdr.writeInt(xid);

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcInfo.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcInfo.java


+ 5 - 5
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java

@@ -26,7 +26,7 @@ public abstract class RpcMessage {
     // the order of the values below are significant.
     RPC_CALL,
     RPC_REPLY;
-    
+
     public int getValue() {
       return ordinal();
     }
@@ -41,7 +41,7 @@ public abstract class RpcMessage {
 
   protected final int xid;
   protected final Type messageType;
-  
+
   RpcMessage(int xid, Type messageType) {
     if (messageType != Type.RPC_CALL && messageType != Type.RPC_REPLY) {
       throw new IllegalArgumentException("Invalid message type " + messageType);
@@ -49,9 +49,9 @@ public abstract class RpcMessage {
     this.xid = xid;
     this.messageType = messageType;
   }
-  
+
   public abstract XDR write(XDR xdr);
-  
+
   public int getXid() {
     return xid;
   }
@@ -59,7 +59,7 @@ public abstract class RpcMessage {
   public Type getMessageType() {
     return messageType;
   }
-  
+
   protected void validateMessageType(Type expected) {
     if (expected != messageType) {
       throw new IllegalArgumentException("Message type is expected to be "

+ 13 - 13
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java

@@ -49,7 +49,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
   private final int lowProgVersion;
   private final int highProgVersion;
   protected final boolean allowInsecurePorts;
-  
+
   /**
    * If not null, this will be used as the socket to use to connect to the
    * system portmap daemon when registering this RPC server program.
@@ -69,7 +69,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
 
   /**
    * Constructor
-   * 
+   *
    * @param program program name
    * @param host host where the Rpc server program is started
    * @param port port where the Rpc server program is listening to
@@ -117,7 +117,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
       register(mapEntry, true);
     }
   }
-  
+
   /**
    * Unregister this program with the local portmapper.
    * @param transport transport layer for port map
@@ -136,7 +136,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
       register(mapEntry, false);
     }
   }
-  
+
   /**
    * Register the program with Portmap or Rpcbind.
    * @param mapEntry port map entries
@@ -159,7 +159,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
   // Start extra daemons or services
   public void startDaemons() {}
   public void stopDaemons() {}
-  
+
   @Override
   public void channelRead(ChannelHandlerContext ctx, Object msg)
       throws Exception {
@@ -178,7 +178,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
     if (LOG.isTraceEnabled()) {
       LOG.trace(program + " procedure #" + call.getProcedure());
     }
-    
+
     if (this.progNumber != call.getProgram()) {
       LOG.warn("Invalid RPC call program " + call.getProgram());
       sendAcceptedReply(call, remoteAddress, AcceptState.PROG_UNAVAIL, ctx);
@@ -191,10 +191,10 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
       sendAcceptedReply(call, remoteAddress, AcceptState.PROG_MISMATCH, ctx);
       return;
     }
-    
+
     handleInternal(ctx, info);
   }
-  
+
   public boolean doPortMonitoring(SocketAddress remoteAddress) {
     if (!allowInsecurePorts) {
       if (LOG.isTraceEnabled()) {
@@ -217,7 +217,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
     }
     return true;
   }
-  
+
   private void sendAcceptedReply(RpcCall call, SocketAddress remoteAddress,
       AcceptState acceptState, ChannelHandlerContext ctx) {
     RpcAcceptedReply reply = RpcAcceptedReply.getInstance(call.getXid(),
@@ -234,7 +234,7 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
     RpcResponse rsp = new RpcResponse(b, remoteAddress);
     RpcUtil.sendRpcResponse(ctx, rsp);
   }
-  
+
   protected static void sendRejectedReply(RpcCall call,
       SocketAddress remoteAddress, ChannelHandlerContext ctx) {
     XDR out = new XDR();
@@ -249,14 +249,14 @@ public abstract class RpcProgram extends ChannelInboundHandlerAdapter {
   }
 
   protected abstract void handleInternal(ChannelHandlerContext ctx, RpcInfo info);
-  
+
   @Override
   public String toString() {
     return "Rpc program: " + program + " at " + host + ":" + port;
   }
-  
+
   protected abstract boolean isIdempotent(RpcCall call);
-  
+
   public int getPort() {
     return port;
   }

+ 6 - 6
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java

@@ -30,25 +30,25 @@ public abstract class RpcReply extends RpcMessage {
     // the order of the values below are significant.
     MSG_ACCEPTED,
     MSG_DENIED;
-    
+
     int getValue() {
       return ordinal();
     }
-    
+
     public static ReplyState fromValue(int value) {
       return values()[value];
     }
   }
-  
+
   protected final ReplyState replyState;
   protected final Verifier verifier;
-  
+
   RpcReply(int xid, ReplyState state, Verifier verifier) {
     super(xid, RpcMessage.Type.RPC_REPLY);
     this.replyState = state;
     this.verifier = verifier;
   }
-  
+
   public RpcAuthInfo getVerifier() {
     return verifier;
   }
@@ -57,7 +57,7 @@ public abstract class RpcReply extends RpcMessage {
     int xid = xdr.readInt();
     final Type messageType = Type.fromValue(xdr.readInt());
     Preconditions.checkState(messageType == RpcMessage.Type.RPC_REPLY);
-    
+
     ReplyState stat = ReplyState.fromValue(xdr.readInt());
     switch (stat) {
     case MSG_ACCEPTED:

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcResponse.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcResponse.java


+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java


+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClient.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClient.java

@@ -39,11 +39,11 @@ public class SimpleTcpClient {
   protected final boolean oneShot;
   private NioEventLoopGroup workerGroup;
   private ChannelFuture future;
-  
+
   public SimpleTcpClient(String host, int port, XDR request) {
     this(host,port, request, true);
   }
-  
+
   public SimpleTcpClient(String host, int port, XDR request, Boolean oneShot) {
     this.host = host;
     this.port = port;

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java


+ 1 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java

@@ -93,7 +93,7 @@ public class SimpleTcpServer {
     LOG.info("Started listening to TCP requests at port " + boundPort + " for "
         + rpcProgram + " with workerCount " + workerCount);
   }
-  
+
   // boundPort will be set only after server starts
   public int getBoundPort() {
     return this.boundPort;

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java

@@ -27,7 +27,7 @@ import java.util.Arrays;
  * A simple UDP based RPC client which just sends one request to a server.
  */
 public class SimpleUdpClient {
-  
+
   protected final String host;
   protected final int port;
   protected final XDR request;
@@ -71,7 +71,7 @@ public class SimpleUdpClient {
       DatagramPacket receivePacket = new DatagramPacket(receiveData,
           receiveData.length);
       socket.receive(receivePacket);
-  
+
       // Check reply status
       XDR xdr = new XDR(Arrays.copyOfRange(receiveData, 0,
           receivePacket.getLength()));

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java


+ 1 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/XDR.java

@@ -76,7 +76,7 @@ public final class XDR {
    * Wraps a byte array as a read-only XDR message. There's no copy involved,
    * thus it is the client's responsibility to ensure that the byte array
    * remains unmodified when using the XDR object.
-   * 
+   *
    * @param src
    *          the byte array to be wrapped.
    */

+ 28 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/package-info.java

@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package provides ONC RPC implementation with simple UDP/TCP
+ * Servers and clients.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.oncrpc;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java

@@ -45,7 +45,7 @@ public abstract class Credentials extends RpcAuthInfo {
     credentials.read(xdr);
     return credentials;
   }
-  
+
   /**
    * Write AuthFlavor and the credentials to the XDR
    * @param cred credentials
@@ -63,9 +63,9 @@ public abstract class Credentials extends RpcAuthInfo {
     }
     cred.write(xdr);
   }
-  
+
   protected int mCredentialsLength;
-  
+
   protected Credentials(AuthFlavor flavor) {
     super(flavor);
   }

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsGSS.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsGSS.java

@@ -29,13 +29,13 @@ public class CredentialsGSS extends Credentials {
   @Override
   public void read(XDR xdr) {
     // TODO Auto-generated method stub
-    
+
   }
 
   @Override
   public void write(XDR xdr) {
     // TODO Auto-generated method stub
-    
+
   }
 
 }

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsNone.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsNone.java


+ 5 - 5
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java

@@ -26,7 +26,7 @@ import org.apache.hadoop.oncrpc.XDR;
 
 /** Credential used by AUTH_SYS */
 public class CredentialsSys extends Credentials {
- 
+
   private static final String HOSTNAME;
   static {
     try {
@@ -40,7 +40,7 @@ public class CredentialsSys extends Credentials {
       throw new RuntimeException(e);
     }
   }
-  
+
   protected int mUID, mGID;
   protected int[] mAuxGIDs;
   protected String mHostName;
@@ -51,7 +51,7 @@ public class CredentialsSys extends Credentials {
     this.mCredentialsLength = 0;
     this.mHostName = HOSTNAME;
   }
-  
+
   public int getGID() {
     return mGID;
   }
@@ -117,12 +117,12 @@ public class CredentialsSys extends Credentials {
       mCredentialsLength += mAuxGIDs.length * 4;
     }
     xdr.writeInt(mCredentialsLength);
-    
+
     xdr.writeInt(mStamp);
     xdr.writeString(mHostName);
     xdr.writeInt(mUID);
     xdr.writeInt(mGID);
-    
+
     if((mAuxGIDs == null) || (mAuxGIDs.length == 0)) {
       xdr.writeInt(0);
     } else {

+ 10 - 10
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/RpcAuthInfo.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/RpcAuthInfo.java

@@ -30,17 +30,17 @@ public abstract class RpcAuthInfo {
     AUTH_SHORT(2),
     AUTH_DH(3),
     RPCSEC_GSS(6);
-    
+
     private int value;
-    
+
     AuthFlavor(int value) {
       this.value = value;
     }
-    
+
     public int getValue() {
       return value;
     }
-    
+
     static AuthFlavor fromValue(int value) {
       for (AuthFlavor v : values()) {
         if (v.value == value) {
@@ -50,28 +50,28 @@ public abstract class RpcAuthInfo {
       throw new IllegalArgumentException("Invalid AuthFlavor value " + value);
     }
   }
-  
+
   private final AuthFlavor flavor;
-  
+
   protected RpcAuthInfo(AuthFlavor flavor) {
     this.flavor = flavor;
   }
-  
+
   /**
    * Load auth info.
    * @param xdr XDR message
    */
   public abstract void read(XDR xdr);
-  
+
   /** Write auth info.
    * @param xdr XDR message
    */
   public abstract void write(XDR xdr);
-  
+
   public AuthFlavor getFlavor() {
     return flavor;
   }
-  
+
   @Override
   public String toString() {
     return "(AuthFlavor:" + flavor + ")";

+ 4 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java

@@ -27,7 +27,7 @@ import org.slf4j.LoggerFactory;
 public abstract class SecurityHandler {
   public static final Logger LOG =
       LoggerFactory.getLogger(SecurityHandler.class);
-  
+
   public abstract String getUser();
 
   public abstract boolean shouldSilentlyDrop(RpcCall request);
@@ -52,7 +52,7 @@ public abstract class SecurityHandler {
   public XDR unwrap(RpcCall request, byte[] data ) throws IOException {
     throw new UnsupportedOperationException();
   }
-  
+
   /**
    * Used by GSS.
    * @param request RPC request
@@ -63,7 +63,7 @@ public abstract class SecurityHandler {
   public byte[] wrap(RpcCall request, XDR response) throws IOException {
     throw new UnsupportedOperationException();
   }
-  
+
   /**
    * Used by AUTH_SYS.
    * Return the uid of the NFS user credential.
@@ -72,7 +72,7 @@ public abstract class SecurityHandler {
   public int getUid() {
     throw new UnsupportedOperationException();
   }
-  
+
   /**
    * Used by AUTH_SYS.
    * Return the gid of the NFS user credential.

+ 5 - 5
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SysSecurityHandler.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/SysSecurityHandler.java

@@ -22,16 +22,16 @@ import org.apache.hadoop.security.IdMappingConstant;
 import org.apache.hadoop.security.IdMappingServiceProvider;
 
 public class SysSecurityHandler extends SecurityHandler {
-  
+
   private final IdMappingServiceProvider iug;
   private final CredentialsSys mCredentialsSys;
-  
+
   public SysSecurityHandler(CredentialsSys credentialsSys,
       IdMappingServiceProvider iug) {
     this.mCredentialsSys = credentialsSys;
     this.iug = iug;
   }
-  
+
   @Override
   public String getUser() {
     return iug.getUserName(mCredentialsSys.getUID(),
@@ -47,12 +47,12 @@ public class SysSecurityHandler extends SecurityHandler {
   public VerifierNone getVerifer(RpcCall request) {
     return new VerifierNone();
   }
-  
+
   @Override
   public int getUid() {
     return mCredentialsSys.getUID();
   }
-  
+
   @Override
   public int getGid() {
     return mCredentialsSys.getGID();

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java

@@ -56,7 +56,7 @@ public abstract class Verifier extends RpcAuthInfo {
     verifer.read(xdr);
     return verifer;
   }
-  
+
   /**
    * Write AuthFlavor and the verifier to the XDR.
    * @param verifier written to XDR
@@ -71,5 +71,5 @@ public abstract class Verifier extends RpcAuthInfo {
       throw new UnsupportedOperationException("Cannot recognize the verifier");
     }
     verifier.write(xdr);
-  }  
+  }
 }

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/VerifierGSS.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/VerifierGSS.java

@@ -29,13 +29,13 @@ public class VerifierGSS extends Verifier {
   @Override
   public void read(XDR xdr) {
     // TODO Auto-generated method stub
-    
+
   }
 
   @Override
   public void write(XDR xdr) {
     // TODO Auto-generated method stub
-    
+
   }
 
 }

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/VerifierNone.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/VerifierNone.java


+ 27 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/security/package-info.java

@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package provides security related implementation for ONC RPC.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/Portmap.java


+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapMapping.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/PortmapMapping.java

@@ -22,7 +22,7 @@ import org.apache.hadoop.oncrpc.XDR;
 /**
  * Represents a mapping entry for in the Portmap service for binding RPC
  * protocols. See RFC 1833 for details.
- * 
+ *
  * This maps a program to a port number.
  */
 public class PortmapMapping {
@@ -61,7 +61,7 @@ public class PortmapMapping {
   public static String key(PortmapMapping mapping) {
     return mapping.program + " " + mapping.version + " " + mapping.transport;
   }
-  
+
   @Override
   public String toString() {
     return String.format("(PortmapMapping-%d:%d:%d:%d)", program, version,

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java


+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java


+ 0 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java


+ 27 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/portmap/package-info.java

@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package provides a port mapper implementation used by ONC RPC.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.portmap;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

+ 14 - 14
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

@@ -40,7 +40,7 @@ import org.mockito.Mockito;
 import org.slf4j.event.Level;
 
 public class TestFrameDecoder {
-  
+
   static {
     GenericTestUtils.setLogLevel(RpcProgram.LOG, Level.TRACE);
   }
@@ -78,7 +78,7 @@ public class TestFrameDecoder {
           return;
         }
       }
-      
+
       resultSize = info.data().readableBytes();
       RpcAcceptedReply reply = RpcAcceptedReply.getAcceptInstance(1234,
           new VerifierNone());
@@ -127,7 +127,7 @@ public class TestFrameDecoder {
     assertTrue(decoder.isLast());
     buf.release();
   }
-  
+
   @Test
   public void testMultipleFrames() {
     RpcFrameDecoder decoder = new RpcFrameDecoder();
@@ -142,7 +142,7 @@ public class TestFrameDecoder {
     assertTrue(XDR.fragmentSize(fragment1)==10);
 
     List<Object> outputBufs = new ArrayList<>();
-    
+
     // decoder should wait for the final fragment
     ByteBuf buf = Unpooled.directBuffer(4 + 10, 4 + 10);
     buf.writeBytes(fragment1);
@@ -188,7 +188,7 @@ public class TestFrameDecoder {
     // Verify the server got the request with right size
     assertEquals(requestSize, resultSize);
   }
-  
+
   @Test
   public void testUnprivilegedPort() throws InterruptedException {
     // Don't allow connections from unprivileged ports. Given that this test is
@@ -205,7 +205,7 @@ public class TestFrameDecoder {
 
     // Verify the server rejected the request.
     assertEquals(0, resultSize);
-    
+
     // Ensure that the NULL procedure does in fact succeed.
     xdrOut = new XDR();
     createPortmapXDRheader(xdrOut, 0);
@@ -213,14 +213,14 @@ public class TestFrameDecoder {
     buffer = new byte[bufsize];
     xdrOut.writeFixedOpaque(buffer);
     int requestSize = xdrOut.size() - headerSize;
-    
+
     // Send the request to the server
     testRequest(xdrOut, serverPort);
 
     // Verify the server did not reject the request.
     assertEquals(requestSize, resultSize);
   }
-  
+
   private static int startRpcServer(boolean allowInsecurePorts)
       throws InterruptedException {
     Random rand = new Random();
@@ -262,19 +262,19 @@ public class TestFrameDecoder {
   }
   /*
    * static void testGetport() { XDR xdr_out = new XDR();
-   * 
+   *
    * createPortmapXDRheader(xdr_out, 3);
-   * 
+   *
    * xdr_out.writeInt(100003); xdr_out.writeInt(3); xdr_out.writeInt(6);
    * xdr_out.writeInt(0);
-   * 
+   *
    * XDR request2 = new XDR();
-   * 
+   *
    * createPortmapXDRheader(xdr_out, 3); request2.writeInt(100003);
    * request2.writeInt(3); request2.writeInt(6); request2.writeInt(0);
-   * 
+   *
    * testRequest(xdr_out); }
-   * 
+   *
    * static void testDump() { XDR xdr_out = new XDR();
    * createPortmapXDRheader(xdr_out, 4); testRequest(xdr_out); }
    */

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java

@@ -38,16 +38,16 @@ public class TestRpcAcceptedReply {
     assertEquals(AcceptState.GARBAGE_ARGS, AcceptState.fromValue(4));
     assertEquals(AcceptState.SYSTEM_ERR, AcceptState.fromValue(5));
   }
-  
+
   @Test(expected = IndexOutOfBoundsException.class)
   public void testAcceptStateFromInvalidValue() {
     AcceptState.fromValue(6);
   }
-  
+
   @Test
   public void testConstructor() {
     Verifier verifier = new VerifierNone();
-    RpcAcceptedReply reply = new RpcAcceptedReply(0, 
+    RpcAcceptedReply reply = new RpcAcceptedReply(0,
         ReplyState.MSG_ACCEPTED, verifier, AcceptState.SUCCESS);
     assertEquals(0, reply.getXid());
     assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType());

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcCall.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcCall.java

@@ -29,7 +29,7 @@ import org.junit.Test;
  * Tests for {@link RpcCall}
  */
 public class TestRpcCall {
-  
+
   @Test
   public void testConstructor() {
     Credentials credential = new CredentialsNone();
@@ -49,13 +49,13 @@ public class TestRpcCall {
     assertEquals(credential, call.getCredential());
     assertEquals(verifier, call.getVerifier());
   }
-  
+
   @Test(expected=IllegalArgumentException.class)
   public void testInvalidRpcVersion() {
     int invalidRpcVersion = 3;
     new RpcCall(0, RpcMessage.Type.RPC_CALL, invalidRpcVersion, 2, 3, 4, null, null);
   }
-  
+
   @Test(expected=IllegalArgumentException.class)
   public void testInvalidRpcMessageType() {
     RpcMessage.Type invalidMessageType = RpcMessage.Type.RPC_REPLY; // Message typ is not RpcMessage.RPC_CALL

+ 14 - 14
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcCallCache.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcCallCache.java

@@ -38,55 +38,55 @@ import static org.mockito.Mockito.*;
  * Unit tests for {@link RpcCallCache}
  */
 public class TestRpcCallCache {
-  
+
   @Test(expected=IllegalArgumentException.class)
   public void testRpcCallCacheConstructorIllegalArgument0(){
     new RpcCallCache("test", 0);
   }
-  
+
   @Test(expected=IllegalArgumentException.class)
   public void testRpcCallCacheConstructorIllegalArgumentNegative(){
     new RpcCallCache("test", -1);
   }
-  
+
   @Test
   public void testRpcCallCacheConstructor(){
     RpcCallCache cache = new RpcCallCache("test", 100);
     assertEquals("test", cache.getProgram());
   }
-  
+
   @Test
   public void testAddRemoveEntries() throws UnknownHostException {
     RpcCallCache cache = new RpcCallCache("test", 100);
     InetAddress clientIp = InetAddress.getByName("1.1.1.1");
     int xid = 100;
-    
+
     // Ensure null is returned when there is no entry in the cache
     // An entry is added to indicate the request is in progress
     CacheEntry e = cache.checkOrAddToCache(clientIp, xid);
     assertNull(e);
     e = cache.checkOrAddToCache(clientIp, xid);
     validateInprogressCacheEntry(e);
-    
+
     // Set call as completed
     RpcResponse response = mock(RpcResponse.class);
     cache.callCompleted(clientIp, xid, response);
     e = cache.checkOrAddToCache(clientIp, xid);
     validateCompletedCacheEntry(e, response);
   }
-  
+
   private void validateInprogressCacheEntry(CacheEntry c) {
     assertTrue(c.isInProgress());
     assertFalse(c.isCompleted());
     assertNull(c.getResponse());
   }
-  
+
   private void validateCompletedCacheEntry(CacheEntry c, RpcResponse response) {
     assertFalse(c.isInProgress());
     assertTrue(c.isCompleted());
     assertEquals(response, c.getResponse());
   }
-  
+
   @Test
   public void testCacheEntry() {
     CacheEntry c = new CacheEntry();
@@ -94,16 +94,16 @@ public class TestRpcCallCache {
     assertTrue(c.isInProgress());
     assertFalse(c.isCompleted());
     assertNull(c.getResponse());
-    
+
     RpcResponse response = mock(RpcResponse.class);
     c.setResponse(response);
     validateCompletedCacheEntry(c, response);
   }
-  
+
   @Test
   public void testCacheFunctionality() throws UnknownHostException {
     RpcCallCache cache = new RpcCallCache("Test", 10);
-    
+
     // Add 20 entries to the cache and only last 10 should be retained
     int size = 0;
     for (int clientId = 0; clientId < 20; clientId++) {
@@ -113,7 +113,7 @@ public class TestRpcCallCache {
       size = Math.min(++size, 10);
       System.out.println("Cache size " + cache.size());
       assertEquals(size, cache.size()); // Ensure the cache size is correct
-      
+
       // Ensure the cache entries are correct
       int startEntry = Math.max(clientId - 10 + 1, 0);
       Iterator<Entry<ClientRequest, CacheEntry>> iterator = cache.iterator();
@@ -123,7 +123,7 @@ public class TestRpcCallCache {
         assertEquals(InetAddress.getByName("1.1.1." + (startEntry + i)),
             key.getClientId());
       }
-      
+
       // Ensure cache entries are returned as in progress.
       for (int i = 0; i < size; i++) {
         CacheEntry e = cache.checkOrAddToCache(

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java

@@ -32,12 +32,12 @@ public class TestRpcDeniedReply {
     Assert.assertEquals(RejectState.RPC_MISMATCH, RejectState.fromValue(0));
     Assert.assertEquals(RejectState.AUTH_ERROR, RejectState.fromValue(1));
   }
-  
+
   @Test(expected=IndexOutOfBoundsException.class)
   public void testRejectStateFromInvalidValue1() {
     RejectState.fromValue(2);
   }
-  
+
   @Test
   public void testConstructor() {
     RpcDeniedReply reply = new RpcDeniedReply(0, ReplyState.MSG_ACCEPTED,

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java

@@ -32,20 +32,20 @@ public class TestRpcMessage {
       }
     };
   }
-  
+
   @Test
   public void testRpcMessage() {
     RpcMessage msg = getRpcMessage(0, RpcMessage.Type.RPC_CALL);
     Assert.assertEquals(0, msg.getXid());
     Assert.assertEquals(RpcMessage.Type.RPC_CALL, msg.getMessageType());
   }
-  
+
   @Test
   public void testValidateMessage() {
     RpcMessage msg = getRpcMessage(0, RpcMessage.Type.RPC_CALL);
     msg.validateMessageType(RpcMessage.Type.RPC_CALL);
   }
-  
+
   @Test(expected = IllegalArgumentException.class)
   public void testValidateMessageException() {
     RpcMessage msg = getRpcMessage(0, RpcMessage.Type.RPC_CALL);

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java

@@ -32,12 +32,12 @@ public class TestRpcReply {
     Assert.assertEquals(ReplyState.MSG_ACCEPTED, ReplyState.fromValue(0));
     Assert.assertEquals(ReplyState.MSG_DENIED, ReplyState.fromValue(1));
   }
-  
+
   @Test(expected=IndexOutOfBoundsException.class)
   public void testReplyStateFromInvalidValue1() {
     ReplyState.fromValue(2);
   }
-  
+
   @Test
   public void testRpcReply() {
     RpcReply reply = new RpcReply(0, ReplyState.MSG_ACCEPTED,

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestXDR.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/TestXDR.java


+ 3 - 3
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/security/TestCredentialsSys.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/security/TestCredentialsSys.java

@@ -34,13 +34,13 @@ public class TestCredentialsSys {
     credential.setUID(0);
     credential.setGID(1);
     credential.setStamp(1234);
-    
+
     XDR xdr = new XDR();
     credential.write(xdr);
-    
+
     CredentialsSys newCredential = new CredentialsSys();
     newCredential.read(xdr.asReadOnlyWrap());
-    
+
     assertEquals(0, newCredential.getUID());
     assertEquals(1, newCredential.getGID());
     assertEquals(1234, newCredential.getStamp());

+ 1 - 1
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/security/TestRpcAuthInfo.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/oncrpc/security/TestRpcAuthInfo.java

@@ -35,7 +35,7 @@ public class TestRpcAuthInfo {
     assertEquals(AuthFlavor.AUTH_DH, AuthFlavor.fromValue(3));
     assertEquals(AuthFlavor.RPCSEC_GSS, AuthFlavor.fromValue(6));
   }
-  
+
   @Test(expected=IllegalArgumentException.class)
   public void testInvalidAuthFlavor() {
     assertEquals(AuthFlavor.AUTH_NONE, AuthFlavor.fromValue(4));

+ 0 - 0
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/portmap/TestPortmap.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/portmap/TestPortmap.java