
HADOOP-7897. ProtobufRpcEngine client side exception mechanism is not consistent with WritableRpcEngine. Contributed by Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1212004 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas committed 13 years ago
commit 4283ac6d7a

+ 10 - 7
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -16,26 +16,26 @@ Trunk (unreleased changes)
     HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
 
    HADOOP-7524. Change RPC to allow multiple protocols including multiple
-                 versions of the same protocol (sanjay Radia)
+    versions of the same protocol (sanjay Radia)
 
     HADOOP-7607. Simplify the RPC proxy cleanup process. (atm)
 
     HADOOP-7635. RetryInvocationHandler should release underlying resources on
-                 close (atm)
+    close (atm)
 
     HADOOP-7687 Make getProtocolSignature public  (sanjay)
 
     HADOOP-7693. Enhance AvroRpcEngine to support the new #addProtocol
-                 interface introduced in HADOOP-7524.  (cutting)
+    interface introduced in HADOOP-7524.  (cutting)
 
     HADOOP-7716. RPC protocol registration on SS does not log the protocol name
-                 (only the class which may be different) (sanjay)
+    (only the class which may be different) (sanjay)
 
     HADOOP-7717. Move handling of concurrent client fail-overs to
-                 RetryInvocationHandler (atm)
+    RetryInvocationHandler (atm)
 
     HADOOP-6490. Use StringUtils over String#replace in Path#normalizePath.
-                 (Uma Maheswara Rao G via harsh)
+    (Uma Maheswara Rao G via harsh)
 
     HADOOP-7736. Remove duplicate Path#normalizePath call. (harsh)
 
@@ -121,7 +121,10 @@ Trunk (unreleased changes)
     KerberosName name rules from configuration. (tucu)
 
     HADOOP-7888. TestFailoverProxy fails intermittently on trunk. (Jason Lowe
-                 via atm)
+    via atm)
+
+    HADOOP-7897. ProtobufRpcEngine client side exception mechanism is not
+    consistent with WritableRpcEngine. (suresh)
 
   OPTIMIZATIONS
 

+ 9 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java

@@ -33,14 +33,17 @@ public class ProtobufHelper {
   }
 
   /**
-   * Return the RemoteException wrapped in ServiceException as cause.
-   * @param se ServiceException that wraps RemoteException
-   * @return RemoteException wrapped in ServiceException or
-   *         a new IOException that wraps unexpected ServiceException.
+   * Return the IOException thrown by the remote server wrapped in 
+   * ServiceException as cause.
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException or
+   *         a new IOException that wraps the unexpected ServiceException.
    */
   public static IOException getRemoteException(ServiceException se) {
     Throwable e = se.getCause();
-    return ((e instanceof RemoteException) ? (IOException) e : 
-      new IOException(se));
+    if (e == null) {
+      return new IOException(se);
+    }
+    return e instanceof IOException ? (IOException) e : new IOException(se);
   }
 }

+ 10 - 16
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

@@ -144,9 +144,10 @@ public class ProtobufRpcEngine implements RpcEngine {
      * 
      * ServiceException has the following causes:
      * <ol>
-     * <li>Exceptions encountered in this methods are thrown as
-     * RpcClientException, wrapped in RemoteException</li>
-     * <li>Remote exceptions are thrown wrapped in RemoteException</li>
+     * <li>Exceptions encountered on the client side in this method are 
+     * set as cause in ServiceException as is.</li>
+     * <li>Exceptions from the server are wrapped in RemoteException and are
+     * set as cause in ServiceException</li>
      * </ol>
      * 
     * Note that the client calling protobuf RPC methods must handle
@@ -167,9 +168,8 @@ public class ProtobufRpcEngine implements RpcEngine {
       try {
         val = (RpcResponseWritable) client.call(RpcKind.RPC_PROTOCOL_BUFFER,
             new RpcRequestWritable(rpcRequest), remoteId);
-      } catch (Exception e) {
-        RpcClientException ce = new RpcClientException("Client exception", e);
-        throw new ServiceException(getRemoteException(ce));
+      } catch (Throwable e) {
+        throw new ServiceException(e);
       }
 
       HadoopRpcResponseProto response = val.message;
@@ -197,9 +197,8 @@ public class ProtobufRpcEngine implements RpcEngine {
       try {
         returnMessage = prototype.newBuilderForType()
             .mergeFrom(response.getResponse()).build();
-      } catch (InvalidProtocolBufferException e) {
-        RpcClientException ce = new RpcClientException("Client exception", e);
-        throw new ServiceException(getRemoteException(ce));
+      } catch (Throwable e) {
+        throw new ServiceException(e);
       }
       return returnMessage;
     }
@@ -309,11 +308,6 @@ public class ProtobufRpcEngine implements RpcEngine {
         numHandlers, numReaders, queueSizePerHandler, verbose, secretManager);
   }
   
-  private static RemoteException getRemoteException(Exception e) {
-    return new RemoteException(e.getClass().getName(),
-        StringUtils.stringifyException(e));
-  }
-
   public static class Server extends RPC.Server {
     /**
      * Construct an RPC server.
@@ -335,8 +329,8 @@ public class ProtobufRpcEngine implements RpcEngine {
           numReaders, queueSizePerHandler, conf, classNameBase(protocolImpl
               .getClass().getName()), secretManager);
       this.verbose = verbose;  
-      registerProtocolAndImpl(RpcKind.RPC_PROTOCOL_BUFFER, 
-          protocolClass, protocolImpl);
+      registerProtocolAndImpl(RpcKind.RPC_PROTOCOL_BUFFER, protocolClass,
+          protocolImpl);
     }
 
     private static RpcResponseWritable handleException(Throwable e) {