
HADOOP-18487. Make protobuf 2.5 an optional runtime dependency. (#4996)

The protobuf 2.5 JAR is no longer needed at runtime.

The option common.protobuf2.scope defines whether the protobuf 2.5.0
dependency is marked as provided or not.

* New package org.apache.hadoop.ipc.internal for internal-only protobuf classes,
  with a ShadedProtobufHelper class whose protobuf references are all to the
  shaded library, so it is guaranteed not to need protobuf-2.5 on the classpath
* All uses of org.apache.hadoop.ipc.ProtobufHelper have
  been replaced by uses of org.apache.hadoop.ipc.internal.ShadedProtobufHelper
* The maven scope of the protobuf-2.5 dependency is set by the option
  common.protobuf2.scope; in this patch it is still "compile".
  It can be set to "provided" in a build (see the sample invocation
  after this list):
       -Dcommon.protobuf2.scope=provided
* There is an explicit reference to it in modules where it may be needed.
* Add a new ipc(callable) method to catch and convert shaded protobuf
  exceptions raised during invocation of the supplied lambda expression
  (see the before/after sketch after this list)
* This is adopted in the code where the migration is not traumatically
  over-complex. RouterAdminProtocolTranslatorPB is left alone for this
  reason.
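
For illustration, a build that excludes the protobuf-2.5 artifact could
look like this (a sample invocation only; the exact goals and profiles
depend on the build):

       mvn clean install -DskipTests -Dcommon.protobuf2.scope=provided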
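
To make the migration pattern concrete, a before/after sketch based on the
HAServiceProtocolClientSideTranslatorPB change in this patch (names are
taken from the diffs below):

       // before: explicit try/catch around every proxy call
       try {
         rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
       } catch (ServiceException e) {
         throw ProtobufHelper.getRemoteException(e);
       }

       // after: with a static import of
       // org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc,
       // ipc() invokes the lambda and converts any shaded
       // ServiceException raised into an IOException
       ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ));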

Contributed by Steve Loughran
Steve Loughran 1 year ago
Parent commit
48b37a7153
53 changed files with 1137 additions and 1335 deletions
  1. 24 0
      BUILDING.txt
  2. 1 2
      hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
  3. 3 2
      hadoop-common-project/hadoop-common/pom.xml
  4. 15 37
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java
  5. 8 17
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java
  6. 40 44
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
  7. 125 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufWrapperLegacy.java
  8. 4 8
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
  9. 4 45
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java
  10. 170 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java
  11. 28 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/package-info.java
  12. 5 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java
  13. 8 13
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java
  14. 4 10
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java
  15. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
  16. 4 10
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java
  17. 6 15
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java
  18. 4 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java
  19. 89 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestShadedProtobufHelper.java
  20. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/LambdaTestUtils.java
  21. 68 138
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java
  22. 172 438
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
  23. 7 6
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
  24. 11 23
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java
  25. 1 1
      hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
  26. 28 14
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolTranslatorPB.java
  27. 2 2
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterAdminServer.java
  28. 3 3
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
  29. 3 3
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MockNamenode.java
  30. 1 1
      hadoop-hdfs-project/hadoop-hdfs/pom.xml
  31. 28 4
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
  32. 3 7
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java
  33. 14 48
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java
  34. 30 48
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java
  35. 6 13
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java
  36. 14 27
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java
  37. 32 89
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java
  38. 11 16
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolTranslatorPB.java
  39. 104 187
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java
  40. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java
  41. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
  42. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
  43. 19 19
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
  44. 4 0
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
  45. 10 26
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/protocolPB/HSAdminRefreshProtocolClientSideTranslatorPB.java
  46. 1 0
      hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
  47. 7 1
      hadoop-project/pom.xml
  48. 1 0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml
  49. 1 0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
  50. 3 2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java
  51. 1 0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml
  52. 1 0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
  53. 1 0
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml

+ 24 - 0
BUILDING.txt

@@ -293,6 +293,30 @@ Maven build goals:
     package. This option requires that -Dpmdk.lib is specified. With -Dbundle.pmdk provided,
     the build will fail if -Dpmdk.lib is not specified.
 
+Controlling the redistribution of the protobuf-2.5 dependency
+
+    The protobuf 2.5.0 library is used at compile time to compile the class
+    org.apache.hadoop.ipc.ProtobufHelper; this class is known to have been used
+    by external projects in the past. Protobuf 2.5 is not used elsewhere in
+    the Hadoop codebase; alongside the move to Protobuf 3.x, a private successor
+    class, org.apache.hadoop.ipc.internal.ShadedProtobufHelper, is now used.
+
+    The hadoop-common JAR still declares a dependency on protobuf-2.5, but this
+    is likely to change in the future. The maven scope of the dependency can be
+    set with the common.protobuf2.scope option.
+    It can be set to "provided" in a build:
+       -Dcommon.protobuf2.scope=provided
+    If this is done then protobuf-2.5.0.jar will no longer be exported as a dependency,
+    and will then be omitted from the share/hadoop/common/lib/ directory of
+    any Hadoop distribution built. Any application declaring a dependency on hadoop-common
+    will no longer get the dependency; if they need it then they must explicitly declare it:
+
+      <dependency>
+        <groupId>com.google.protobuf</groupId>
+        <artifactId>protobuf-java</artifactId>
+        <version>2.5.0</version>
+      </dependency>
+
 ----------------------------------------------------------------------------------
 Building components separately
 

+ 1 - 2
hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

@@ -451,8 +451,7 @@
   </Match>
 
   <Match>
-    <Class name="org.apache.hadoop.ipc.ProtobufHelper" />
-    <Method name="getFixedByteString" />
+    <Class name="org.apache.hadoop.ipc.internal.ShadedProtobufHelper" />
     <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
   </Match>
 </FindBugsFilter>

+ 3 - 2
hadoop-common-project/hadoop-common/pom.xml

@@ -250,10 +250,11 @@
       <artifactId>re2j</artifactId>
       <scope>compile</scope>
     </dependency>
+    <!-- Needed for compilation, though no longer used in production. -->
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>compile</scope>
+      <scope>${common.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>
@@ -484,11 +485,11 @@
               <!--These classes have direct Protobuf references for backward compatibility reasons-->
               <excludes>
                 <exclude>**/ProtobufHelper.java</exclude>
-                <exclude>**/RpcWritable.java</exclude>
                 <exclude>**/ProtobufRpcEngineCallback.java</exclude>
                 <exclude>**/ProtobufRpcEngine.java</exclude>
                 <exclude>**/ProtobufRpcEngine2.java</exclude>
                 <exclude>**/ProtobufRpcEngineProtos.java</exclude>
+                <exclude>**/ProtobufWrapperLegacy.java</exclude>
               </excludes>
             </configuration>
           </execution>

+ 15 - 37
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java

@@ -37,14 +37,13 @@ import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestPr
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -84,60 +83,39 @@ public class HAServiceProtocolClientSideTranslatorPB implements
 
   @Override
   public void monitorHealth() throws IOException {
-    try {
-      rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ));
   }
 
   @Override
   public void transitionToActive(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
-      TransitionToActiveRequestProto req =
-          TransitionToActiveRequestProto.newBuilder()
+    TransitionToActiveRequestProto req =
+        TransitionToActiveRequestProto.newBuilder()
             .setReqInfo(convert(reqInfo)).build();
-
-      rpcProxy.transitionToActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.transitionToActive(NULL_CONTROLLER, req));
   }
 
   @Override
   public void transitionToStandby(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
-      TransitionToStandbyRequestProto req =
+    TransitionToStandbyRequestProto req =
         TransitionToStandbyRequestProto.newBuilder()
-          .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+            .setReqInfo(convert(reqInfo)).build();
+    ipc(() -> rpcProxy.transitionToStandby(NULL_CONTROLLER, req));
   }
 
   @Override
   public void transitionToObserver(StateChangeRequestInfo reqInfo)
       throws IOException {
-    try {
-      TransitionToObserverRequestProto req =
-          TransitionToObserverRequestProto.newBuilder()
-              .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    TransitionToObserverRequestProto req =
+        TransitionToObserverRequestProto.newBuilder()
+            .setReqInfo(convert(reqInfo)).build();
+    ipc(() -> rpcProxy.transitionToObserver(NULL_CONTROLLER, req));
   }
 
   @Override
   public HAServiceStatus getServiceStatus() throws IOException {
     GetServiceStatusResponseProto status;
-    try {
-      status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
-          GET_SERVICE_STATUS_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    status = ipc(() -> rpcProxy.getServiceStatus(NULL_CONTROLLER,
+        GET_SERVICE_STATUS_REQ));
     
     HAServiceStatus ret = new HAServiceStatus(
         convert(status.getState()));

+ 8 - 17
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java

@@ -27,15 +27,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.ZKFCProtocol;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 
 public class ZKFCProtocolClientSideTranslatorPB implements
@@ -57,24 +56,16 @@ public class ZKFCProtocolClientSideTranslatorPB implements
   @Override
   public void cedeActive(int millisToCede) throws IOException,
       AccessControlException {
-    try {
-      CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
-          .setMillisToCede(millisToCede)
-          .build();
-      rpcProxy.cedeActive(NULL_CONTROLLER, req);      
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
+        .setMillisToCede(millisToCede)
+        .build();
+    ipc(() -> rpcProxy.cedeActive(NULL_CONTROLLER, req));
   }
 
   @Override
   public void gracefulFailover() throws IOException, AccessControlException {
-    try {
-      rpcProxy.gracefulFailover(NULL_CONTROLLER,
-          GracefulFailoverRequestProto.getDefaultInstance());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.gracefulFailover(NULL_CONTROLLER,
+        GracefulFailoverRequestProto.getDefaultInstance()));
   }
 
 

+ 40 - 44
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java

@@ -18,10 +18,10 @@
 package org.apache.hadoop.ipc;
 
 import java.io.IOException;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -30,31 +30,37 @@ import org.apache.hadoop.thirdparty.protobuf.ByteString;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
 /**
- * Helper methods for protobuf related RPC implementation
+ * Helper methods for protobuf related RPC implementation.
+ * This is deprecated because it references protobuf 2.5 classes
+ * as well as the shaded ones, and so needs an unshaded protobuf-2.5
+ * JAR on the classpath during execution.
+ * It MUST NOT be used internally; it is retained in case existing,
+ * external applications already use it.
+ * @deprecated hadoop code MUST use {@link ShadedProtobufHelper}.
  */
 @InterfaceAudience.Private
+@Deprecated
 public class ProtobufHelper {
+
   private ProtobufHelper() {
     // Hidden constructor for class with only static helper methods
   }
 
   /**
-   * Return the IOException thrown by the remote server wrapped in 
+   * Return the IOException thrown by the remote server wrapped in
    * ServiceException as cause.
    * @param se ServiceException that wraps IO exception thrown by the server
    * @return Exception wrapped in ServiceException or
    *         a new IOException that wraps the unexpected ServiceException.
    */
   public static IOException getRemoteException(ServiceException se) {
-    Throwable e = se.getCause();
-    if (e == null) {
-      return new IOException(se);
-    }
-    return e instanceof IOException ? (IOException) e : new IOException(se);
+    return ShadedProtobufHelper.getRemoteException(se);
   }
 
   /**
-   * Kept for backward compatible.
+   * Extract the remote exception from an unshaded version of the protobuf
+   * libraries.
+   * Kept for backward compatibility.
    * Return the IOException thrown by the remote server wrapped in
    * ServiceException as cause.
    * @param se ServiceException that wraps IO exception thrown by the server
@@ -71,29 +77,13 @@ public class ProtobufHelper {
     return e instanceof IOException ? (IOException) e : new IOException(se);
   }
 
-  /**
-   * Map used to cache fixed strings to ByteStrings. Since there is no
-   * automatic expiration policy, only use this for strings from a fixed, small
-   * set.
-   * <p/>
-   * This map should not be accessed directly. Used the getFixedByteString
-   * methods instead.
-   */
-  private final static ConcurrentHashMap<Object, ByteString>
-      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
-
   /**
    * Get the ByteString for frequently used fixed and small set strings.
    * @param key string
    * @return the ByteString for frequently used fixed and small set strings.
    */
   public static ByteString getFixedByteString(Text key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key.toString());
-      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }
 
   /**
@@ -102,34 +92,40 @@ public class ProtobufHelper {
    * @return ByteString for frequently used fixed and small set strings.
    */
   public static ByteString getFixedByteString(String key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key);
-      FIXED_BYTESTRING_CACHE.put(key, value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }
 
+  /**
+   * Get the byte string of a non-null byte array.
+   * If the array is 0 bytes long, return a singleton to reduce object allocation.
+   * @param bytes bytes to convert.
+   * @return a value
+   */
   public static ByteString getByteString(byte[] bytes) {
     // return singleton to reduce object allocation
-    return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+    return ShadedProtobufHelper.getByteString(bytes);
   }
 
+  /**
+   * Get a token from a TokenProto payload.
+   * @param tokenProto marshalled token
+   * @return the token.
+   */
   public static Token<? extends TokenIdentifier> tokenFromProto(
       TokenProto tokenProto) {
-    Token<? extends TokenIdentifier> token = new Token<>(
-        tokenProto.getIdentifier().toByteArray(),
-        tokenProto.getPassword().toByteArray(), new Text(tokenProto.getKind()),
-        new Text(tokenProto.getService()));
-    return token;
+    return ShadedProtobufHelper.tokenFromProto(tokenProto);
   }
 
+  /**
+   * Create a {@code TokenProto} instance
+   * from a hadoop token.
+   * This builds and caches the fields
+   * (identifier, password, kind, service) but not
+   * renewer or any payload.
+   * @param tok token
+   * @return a marshallable protobuf class.
+   */
   public static TokenProto protoFromToken(Token<?> tok) {
-    TokenProto.Builder builder = TokenProto.newBuilder().
-        setIdentifier(getByteString(tok.getIdentifier())).
-        setPassword(getByteString(tok.getPassword())).
-        setKindBytes(getFixedByteString(tok.getKind())).
-        setServiceBytes(getFixedByteString(tok.getService()));
-    return builder.build();
+    return ShadedProtobufHelper.protoFromToken(tok);
   }
 }

+ 125 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufWrapperLegacy.java

@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ipc;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.Preconditions;
+
+/**
+ * A RpcWritable wrapper for unshaded protobuf messages.
+ * This class isolates unshaded protobuf classes from
+ * the rest of the RPC codebase, so it can operate without
+ * needing that on the classpath <i>at runtime</i>.
+ * The classes are needed at compile time; and if
+ * unshaded protobuf messages are to be marshalled, they
+ * will need to be on the classpath then.
+ * That is implicit: it is impossible to pass in a class
+ * which is a protobuf message unless that condition is met.
+ */
+@InterfaceAudience.Private
+public class ProtobufWrapperLegacy extends RpcWritable {
+
+  private com.google.protobuf.Message message;
+
+  /**
+   * Construct.
+   * The type of the parameter is Object so as to keep the casting internal
+   * to this class.
+   * @param message message to wrap.
+   * @throws IllegalArgumentException if the class is not a protobuf message.
+   */
+  public ProtobufWrapperLegacy(Object message) {
+    Preconditions.checkArgument(isUnshadedProtobufMessage(message),
+        "message class is not an unshaded protobuf message %s",
+        message.getClass());
+    this.message = (com.google.protobuf.Message) message;
+  }
+
+  public com.google.protobuf.Message getMessage() {
+    return message;
+  }
+
+
+  @Override
+  public void writeTo(ResponseBuffer out) throws IOException {
+    int length = message.getSerializedSize();
+    length += com.google.protobuf.CodedOutputStream.
+        computeUInt32SizeNoTag(length);
+    out.ensureCapacity(length);
+    message.writeDelimitedTo(out);
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  protected <T> T readFrom(ByteBuffer bb) throws IOException {
+    // using the parser with a byte[]-backed coded input stream is the
+    // most efficient way to deserialize a protobuf.  it has a direct
+    // path to the PB ctor that doesn't create multi-layered streams
+    // that internally buffer.
+    com.google.protobuf.CodedInputStream cis =
+        com.google.protobuf.CodedInputStream.newInstance(
+            bb.array(), bb.position() + bb.arrayOffset(), bb.remaining());
+    try {
+      cis.pushLimit(cis.readRawVarint32());
+      message = message.getParserForType().parseFrom(cis);
+      cis.checkLastTagWas(0);
+    } finally {
+      // advance over the bytes read.
+      bb.position(bb.position() + cis.getTotalBytesRead());
+    }
+    return (T) message;
+  }
+
+  /**
+   * Has protobuf been looked for and is known as absent?
+   * Saves a check on every message.
+   */
+  private static final AtomicBoolean PROTOBUF_KNOWN_NOT_FOUND =
+      new AtomicBoolean(false);
+
+  /**
+   * Is a message an unshaded protobuf message?
+   * @param payload payload
+   * @return true if protobuf.jar is on the classpath and the payload is a Message
+   */
+  public static boolean isUnshadedProtobufMessage(Object payload) {
+    if (PROTOBUF_KNOWN_NOT_FOUND.get()) {
+      // protobuf is known to be absent. fail fast without examining
+      // jars or generating exceptions.
+      return false;
+    }
+    // load the protobuf message class.
+    // if it does not load, then the payload is guaranteed not to be
+    // an unshaded protobuf message
+    // this relies on classloader caching for performance
+    try {
+      Class<?> protobufMessageClazz =
+          Class.forName("com.google.protobuf.Message");
+      return protobufMessageClazz.isAssignableFrom(payload.getClass());
+    } catch (ClassNotFoundException e) {
+      PROTOBUF_KNOWN_NOT_FOUND.set(true);
+      return false;
+    }
+
+  }
+}

+ 4 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java

@@ -31,9 +31,9 @@ import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureReq
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
 import org.apache.hadoop.net.NetUtils;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class maintains a cache of protocol versions and corresponding protocol
@@ -122,12 +122,8 @@ public class RpcClientUtil {
       builder.setProtocol(protocol.getName());
       builder.setRpcKind(rpcKind.toString());
       GetProtocolSignatureResponseProto resp;
-      try {
-        resp = protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
-            builder.build());
-      } catch (ServiceException se) {
-        throw ProtobufHelper.getRemoteException(se);
-      }
+      resp = ipc(() -> protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
+          builder.build()));
       versionMap = convertProtocolSignatureProtos(resp
           .getProtocolSignatureList());
       putVersionSignatureMap(serverAddress, protocol.getName(),

+ 4 - 45
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java

@@ -41,9 +41,11 @@ public abstract class RpcWritable implements Writable {
     if (o instanceof RpcWritable) {
       return (RpcWritable)o;
     } else if (o instanceof Message) {
+      // hadoop shaded protobuf
       return new ProtobufWrapper((Message)o);
-    } else if (o instanceof com.google.protobuf.Message) {
-      return new ProtobufWrapperLegacy((com.google.protobuf.Message) o);
+    } else if (ProtobufWrapperLegacy.isUnshadedProtobufMessage(o)) {
+      // unshaded protobuf
+      return new ProtobufWrapperLegacy(o);
     } else if (o instanceof Writable) {
       return new WritableWrapper((Writable)o);
     }
@@ -134,49 +136,6 @@ public abstract class RpcWritable implements Writable {
     }
   }
 
-  // adapter for Protobufs.
-  static class ProtobufWrapperLegacy extends RpcWritable {
-    private com.google.protobuf.Message message;
-
-    ProtobufWrapperLegacy(com.google.protobuf.Message message) {
-      this.message = message;
-    }
-
-    com.google.protobuf.Message getMessage() {
-      return message;
-    }
-
-    @Override
-    void writeTo(ResponseBuffer out) throws IOException {
-      int length = message.getSerializedSize();
-      length += com.google.protobuf.CodedOutputStream.
-          computeUInt32SizeNoTag(length);
-      out.ensureCapacity(length);
-      message.writeDelimitedTo(out);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    <T> T readFrom(ByteBuffer bb) throws IOException {
-      // using the parser with a byte[]-backed coded input stream is the
-      // most efficient way to deserialize a protobuf.  it has a direct
-      // path to the PB ctor that doesn't create multi-layered streams
-      // that internally buffer.
-      com.google.protobuf.CodedInputStream cis =
-          com.google.protobuf.CodedInputStream.newInstance(
-              bb.array(), bb.position() + bb.arrayOffset(), bb.remaining());
-      try {
-        cis.pushLimit(cis.readRawVarint32());
-        message = message.getParserForType().parseFrom(cis);
-        cis.checkLastTagWas(0);
-      } finally {
-        // advance over the bytes read.
-        bb.position(bb.position() + cis.getTotalBytesRead());
-      }
-      return (T)message;
-    }
-  }
-
   /**
    * adapter to allow decoding of writables and protobufs from a byte buffer.
    */

+ 170 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java

@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ipc.internal;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.thirdparty.protobuf.ByteString;
+import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+/**
+ * Helper methods for protobuf related RPC implementation using the
+ * hadoop {@code org.apache.hadoop.thirdparty.protobuf} shaded version.
+ * This is <i>absolutely private to hadoop-* modules</i>.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public final class ShadedProtobufHelper {
+
+  private ShadedProtobufHelper() {
+    // Hidden constructor for class with only static helper methods
+  }
+
+  /**
+   * Return the IOException thrown by the remote server wrapped in
+   * ServiceException as cause.
+   * The signature of this method changes with updates to the hadoop-thirdparty
+   * shaded protobuf library.
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException or
+   * a new IOException that wraps the unexpected ServiceException.
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static IOException getRemoteException(ServiceException se) {
+    Throwable e = se.getCause();
+    if (e == null) {
+      return new IOException(se);
+    }
+    return e instanceof IOException
+        ? (IOException) e
+        : new IOException(se);
+  }
+
+  /**
+   * Map used to cache fixed strings to ByteStrings. Since there is no
+   * automatic expiration policy, only use this for strings from a fixed, small
+   * set.
+   * <p>
+   * This map should not be accessed directly. Use the getFixedByteString
+   * methods instead.
+   */
+  private static final ConcurrentHashMap<Object, ByteString>
+      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
+
+  /**
+   * Get the ByteString for frequently used fixed and small set strings.
+   * @param key Hadoop Writable Text string
+   * @return the ByteString for frequently used fixed and small set strings.
+   */
+  public static ByteString getFixedByteString(Text key) {
+    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
+    if (value == null) {
+      value = ByteString.copyFromUtf8(key.toString());
+      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
+    }
+    return value;
+  }
+
+  /**
+   * Get the ByteString for frequently used fixed and small set strings.
+   * @param key string
+   * @return ByteString for frequently used fixed and small set strings.
+   */
+  public static ByteString getFixedByteString(String key) {
+    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
+    if (value == null) {
+      value = ByteString.copyFromUtf8(key);
+      FIXED_BYTESTRING_CACHE.put(key, value);
+    }
+    return value;
+  }
+
+  /**
+   * Get the byte string of a non-null byte array.
+   * If the array is 0 bytes long, return a singleton to reduce object allocation.
+   * @param bytes bytes to convert.
+   * @return the protobuf byte string representation of the array.
+   */
+  public static ByteString getByteString(byte[] bytes) {
+    // return singleton to reduce object allocation
+    return (bytes.length == 0)
+        ? ByteString.EMPTY
+        : ByteString.copyFrom(bytes);
+  }
+
+  /**
+   * Create a hadoop token from a protobuf token.
+   * @param tokenProto token
+   * @return a new token
+   */
+  public static Token<? extends TokenIdentifier> tokenFromProto(
+      TokenProto tokenProto) {
+    Token<? extends TokenIdentifier> token = new Token<>(
+        tokenProto.getIdentifier().toByteArray(),
+        tokenProto.getPassword().toByteArray(),
+        new Text(tokenProto.getKind()),
+        new Text(tokenProto.getService()));
+    return token;
+  }
+
+  /**
+   * Create a {@code TokenProto} instance
+   * from a hadoop token.
+   * This builds and caches the fields
+   * (identifier, password, kind, service) but not
+   * renewer or any payload.
+   * @param tok token
+   * @return a marshallable protobuf class.
+   */
+  public static TokenProto protoFromToken(Token<?> tok) {
+    TokenProto.Builder builder = TokenProto.newBuilder().
+        setIdentifier(getByteString(tok.getIdentifier())).
+        setPassword(getByteString(tok.getPassword())).
+        setKindBytes(getFixedByteString(tok.getKind())).
+        setServiceBytes(getFixedByteString(tok.getService()));
+    return builder.build();
+  }
+
+  /**
+   * Evaluate a protobuf call, converting any ServiceException to an IOException.
+   * @param call invocation to make
+   * @return the result of the call
+   * @param <T> type of the result
+   * @throws IOException any translated protobuf exception
+   */
+  public static <T> T ipc(IpcCall<T> call) throws IOException {
+    try {
+      return call.call();
+    } catch (ServiceException e) {
+      throw getRemoteException(e);
+    }
+  }
+
+  @FunctionalInterface
+  public interface IpcCall<T> {
+    T call() throws ServiceException;
+  }
+}

+ 28 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/package-info.java

@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * IPC internal classes, not for use by any libraries outside
+ * the Apache Hadoop source tree.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN"})
+@InterfaceStability.Unstable
+package org.apache.hadoop.ipc.internal;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

+ 5 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java

@@ -18,8 +18,12 @@
 
 /**
  * Tools to help define network clients and servers.
+ * Other ASF projects use this package, often with their own shaded/unshaded
+ * versions of protobuf messages.
+ * Changes to the API signatures will break things, especially changes to
+ * {@link org.apache.hadoop.ipc.RPC} and {@link org.apache.hadoop.ipc.RpcEngine}.
  */
-@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
+@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce", "YARN", "Hive", "Ozone"})
 @InterfaceStability.Evolving
 package org.apache.hadoop.ipc;
 import org.apache.hadoop.classification.InterfaceAudience;

+ 8 - 13
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java

@@ -25,7 +25,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RefreshResponse;
@@ -34,9 +33,9 @@ import org.apache.hadoop.ipc.GenericRefreshProtocol;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 public class GenericRefreshProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, GenericRefreshProtocol, Closeable {
@@ -59,17 +58,13 @@ public class GenericRefreshProtocolClientSideTranslatorPB implements
   public Collection<RefreshResponse> refresh(String identifier, String[] args) throws IOException {
     List<String> argList = Arrays.asList(args);
 
-    try {
-      GenericRefreshRequestProto request = GenericRefreshRequestProto.newBuilder()
-        .setIdentifier(identifier)
-        .addAllArgs(argList)
-        .build();
+    GenericRefreshRequestProto request = GenericRefreshRequestProto.newBuilder()
+        .setIdentifier(identifier).addAllArgs(argList).build();
+
+    GenericRefreshResponseCollectionProto resp = ipc(() ->
+        rpcProxy.refresh(NULL_CONTROLLER, request));
+    return unpack(resp);
 
-      GenericRefreshResponseCollectionProto resp = rpcProxy.refresh(NULL_CONTROLLER, request);
-      return unpack(resp);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
   }
 
   private Collection<RefreshResponse> unpack(GenericRefreshResponseCollectionProto collection) {

+ 4 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java

@@ -21,16 +21,14 @@ package org.apache.hadoop.ipc.protocolPB;
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.ipc.RefreshCallQueueProtocol;
 import org.apache.hadoop.ipc.proto.RefreshCallQueueProtocolProtos.RefreshCallQueueRequestProto;
-import org.apache.hadoop.ipc.protocolPB.RefreshCallQueueProtocolPB;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 public class RefreshCallQueueProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, RefreshCallQueueProtocol, Closeable {
@@ -55,12 +53,8 @@ public class RefreshCallQueueProtocolClientSideTranslatorPB implements
 
   @Override
   public void refreshCallQueue() throws IOException {
-    try {
-      rpcProxy.refreshCallQueue(NULL_CONTROLLER,
-          VOID_REFRESH_CALL_QUEUE_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshCallQueue(NULL_CONTROLLER,
+        VOID_REFRESH_CALL_QUEUE_REQUEST));
   }
 
   @Override

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.security;
 
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.thirdparty.protobuf.ByteString;
 
 import java.io.BufferedInputStream;
@@ -46,7 +47,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;
@@ -382,7 +382,7 @@ public class Credentials implements Writable {
       CredentialsKVProto.Builder kv = CredentialsKVProto.newBuilder().
           setAliasBytes(ByteString.copyFrom(
               e.getKey().getBytes(), 0, e.getKey().getLength())).
-          setToken(ProtobufHelper.protoFromToken(e.getValue()));
+          setToken(ShadedProtobufHelper.protoFromToken(e.getValue()));
       storage.addTokens(kv.build());
     }
 
@@ -404,7 +404,7 @@ public class Credentials implements Writable {
     CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in);
     for (CredentialsKVProto kv : storage.getTokensList()) {
       addToken(new Text(kv.getAliasBytes().toByteArray()),
-               ProtobufHelper.tokenFromProto(kv.getToken()));
+          ShadedProtobufHelper.tokenFromProto(kv.getToken()));
     }
     for (CredentialsKVProto kv : storage.getSecretsList()) {
       addSecretKey(new Text(kv.getAliasBytes().toByteArray()),

+ 4 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java

@@ -21,16 +21,14 @@ package org.apache.hadoop.security.protocolPB;
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
-import org.apache.hadoop.security.protocolPB.RefreshAuthorizationPolicyProtocolPB;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, RefreshAuthorizationPolicyProtocol, Closeable {
@@ -55,12 +53,8 @@ public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
 
   @Override
   public void refreshServiceAcl() throws IOException {
-    try {
-      rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
-          VOID_REFRESH_SERVICE_ACL_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
+        VOID_REFRESH_SERVICE_ACL_REQUEST));
   }
 
   @Override

+ 6 - 15
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java

@@ -21,16 +21,15 @@ package org.apache.hadoop.security.protocolPB;
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
 import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, RefreshUserMappingsProtocol, Closeable {
@@ -59,22 +58,14 @@ public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
 
   @Override
   public void refreshUserToGroupsMappings() throws IOException {
-    try {
-      rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
-          VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
+        VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST));
   }
 
   @Override
   public void refreshSuperUserGroupsConfiguration() throws IOException {
-    try {
-      rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
-          VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
+        VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST));
   }
 
   @Override

+ 4 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java

@@ -20,7 +20,7 @@ package org.apache.hadoop.tools.protocolPB;
 
 import java.io.Closeable;
 import java.io.IOException;
-import org.apache.hadoop.ipc.ProtobufHelper;
+
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
@@ -29,7 +29,8 @@ import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForU
 import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
 
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 public class GetUserMappingsProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, GetUserMappingsProtocol, Closeable {
@@ -53,11 +54,7 @@ public class GetUserMappingsProtocolClientSideTranslatorPB implements
     GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto
         .newBuilder().setUser(user).build();
     GetGroupsForUserResponseProto resp;
-    try {
-      resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> rpcProxy.getGroupsForUser(NULL_CONTROLLER, request));
     return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
   }
 

+ 89 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestShadedProtobufHelper.java

@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ipc;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
+import org.apache.hadoop.test.AbstractHadoopTestBase;
+import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
+import static org.apache.hadoop.test.LambdaTestUtils.verifyCause;
+
+/**
+ * Test methods in {@link ShadedProtobufHelper}.
+ */
+public class TestShadedProtobufHelper extends AbstractHadoopTestBase {
+
+  @Test
+  public void testExtractRemoteExceptionNoCause() throws Throwable {
+    ServiceException source = new ServiceException("empty");
+
+    IOException ex = ShadedProtobufHelper.getRemoteException(source);
+    verifyCause(ServiceException.class, ex);
+  }
+
+  @Test
+  public void testExtractRemoteExceptionIOECause() throws Throwable {
+    IOException source = new IOException("ioe");
+
+    IOException ex = ShadedProtobufHelper.getRemoteException(
+        new ServiceException(source));
+    // if not the same, throw
+    if (ex != source) {
+      throw ex;
+    }
+  }
+
+  @Test
+  public void testExtractRemoteExceptionOtherCause() throws Throwable {
+    NullPointerException source = new NullPointerException("npe");
+
+    IOException ex = ShadedProtobufHelper.getRemoteException(
+        new ServiceException(source));
+    // verify the cause chain: ServiceException wrapping the NullPointerException
+    ServiceException c1 = verifyCause(ServiceException.class, ex);
+    verifyCause(NullPointerException.class, c1);
+  }
+
+  @Test
+  public void testIPCWrapperServiceException() throws Throwable {
+    intercept(IOException.class, "expected", () -> {
+      ipc(() -> {
+        throw new ServiceException("expected");
+      });
+    });
+  }
+
+  @Test
+  public void testIPCWrapperNPE() throws Throwable {
+    final IOException ex = intercept(IOException.class, "npe", () -> {
+      ipc(() -> {
+        throw new ServiceException(new NullPointerException("npe"));
+      });
+    });
+    ServiceException c1 = verifyCause(ServiceException.class, ex);
+    verifyCause(NullPointerException.class, c1);
+  }
+
+}

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/LambdaTestUtils.java

@@ -819,7 +819,7 @@ public final class LambdaTestUtils {
     if (cause == null || !clazz.isAssignableFrom(cause.getClass())) {
       throw caught;
     } else {
-      return (E) caught;
+      return (E) cause;
     }
   }
 

+ 68 - 138
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java

@@ -67,7 +67,6 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.DiskBa
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus;
 import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
@@ -76,13 +75,13 @@ import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
+
 /**
  * This class is the client side translator to translate the requests made on
  * {@link ClientDatanodeProtocol} interfaces to the RPC server implementing
@@ -197,31 +196,19 @@ public class ClientDatanodeProtocolTranslatorPB implements
     GetReplicaVisibleLengthRequestProto req =
         GetReplicaVisibleLengthRequestProto.newBuilder()
             .setBlock(PBHelperClient.convert(b)).build();
-    try {
-      return rpcProxy.getReplicaVisibleLength(NULL_CONTROLLER, req).getLength();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return ipc(() -> rpcProxy.getReplicaVisibleLength(NULL_CONTROLLER, req).getLength());
   }
 
   @Override
   public void refreshNamenodes() throws IOException {
-    try {
-      rpcProxy.refreshNamenodes(NULL_CONTROLLER, VOID_REFRESH_NAMENODES);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.refreshNamenodes(NULL_CONTROLLER, VOID_REFRESH_NAMENODES));
   }
 
   @Override
   public void deleteBlockPool(String bpid, boolean force) throws IOException {
     DeleteBlockPoolRequestProto req = DeleteBlockPoolRequestProto.newBuilder()
         .setBlockPool(bpid).setForce(force).build();
-    try {
-      rpcProxy.deleteBlockPool(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.deleteBlockPool(NULL_CONTROLLER, req));
   }
 
   @Override
@@ -232,11 +219,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
             .setBlock(PBHelperClient.convert(block))
             .setToken(PBHelperClient.convert(token)).build();
     GetBlockLocalPathInfoResponseProto resp;
-    try {
-      resp = rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    resp = ipc(() -> rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req));
     return new BlockLocalPathInfo(PBHelperClient.convert(resp.getBlock()),
         resp.getLocalPath(), resp.getLocalMetaPath());
   }
@@ -257,94 +240,61 @@ public class ClientDatanodeProtocolTranslatorPB implements
   public void shutdownDatanode(boolean forUpgrade) throws IOException {
     ShutdownDatanodeRequestProto request = ShutdownDatanodeRequestProto
         .newBuilder().setForUpgrade(forUpgrade).build();
-    try {
-      rpcProxy.shutdownDatanode(NULL_CONTROLLER, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.shutdownDatanode(NULL_CONTROLLER, request));
   }
 
   @Override
   public void evictWriters() throws IOException {
-    try {
-      rpcProxy.evictWriters(NULL_CONTROLLER, VOID_EVICT_WRITERS);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.evictWriters(NULL_CONTROLLER, VOID_EVICT_WRITERS));
   }
 
   @Override
   public DatanodeLocalInfo getDatanodeInfo() throws IOException {
     GetDatanodeInfoResponseProto response;
-    try {
-      response = rpcProxy.getDatanodeInfo(NULL_CONTROLLER,
-          VOID_GET_DATANODE_INFO);
-      return PBHelperClient.convert(response.getLocalInfo());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    response = ipc(() -> rpcProxy.getDatanodeInfo(NULL_CONTROLLER,
+        VOID_GET_DATANODE_INFO));
+    return PBHelperClient.convert(response.getLocalInfo());
   }
 
   @Override
   public void startReconfiguration() throws IOException {
-    try {
-      rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG));
   }
 
   @Override
   public ReconfigurationTaskStatus getReconfigurationStatus()
       throws IOException {
-    try {
-      return ReconfigurationProtocolUtils.getReconfigurationStatus(
-          rpcProxy
-          .getReconfigurationStatus(
-              NULL_CONTROLLER,
-              VOID_GET_RECONFIG_STATUS));
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return ReconfigurationProtocolUtils.getReconfigurationStatus(
+        ipc(() -> rpcProxy.getReconfigurationStatus(
+            NULL_CONTROLLER,
+            VOID_GET_RECONFIG_STATUS)));
   }
 
   @Override
   public List<String> listReconfigurableProperties() throws IOException {
     ListReconfigurablePropertiesResponseProto response;
-    try {
-      response = rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
-          VOID_LIST_RECONFIGURABLE_PROPERTIES);
-      return response.getNameList();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    response = ipc(() -> rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
+        VOID_LIST_RECONFIGURABLE_PROPERTIES));
+    return response.getNameList();
   }
 
   @Override
   public void triggerBlockReport(BlockReportOptions options)
       throws IOException {
-    try {
-      TriggerBlockReportRequestProto.Builder builder = TriggerBlockReportRequestProto.newBuilder().
-          setIncremental(options.isIncremental());
-      if (options.getNamenodeAddr() != null) {
-        builder.setNnAddress(NetUtils.getHostPortString(options.getNamenodeAddr()));
-      }
-      rpcProxy.triggerBlockReport(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    TriggerBlockReportRequestProto.Builder builder = TriggerBlockReportRequestProto.newBuilder().
+        setIncremental(options.isIncremental());
+    if (options.getNamenodeAddr() != null) {
+      builder.setNnAddress(NetUtils.getHostPortString(options.getNamenodeAddr()));
     }
+    ipc(() -> rpcProxy.triggerBlockReport(NULL_CONTROLLER, builder.build()));
   }
 
   @Override
   public long getBalancerBandwidth() throws IOException {
     GetBalancerBandwidthResponseProto response;
-    try {
-      response = rpcProxy.getBalancerBandwidth(NULL_CONTROLLER,
-          VOID_GET_BALANCER_BANDWIDTH);
-      return response.getBandwidth();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    response = ipc(() -> rpcProxy.getBalancerBandwidth(NULL_CONTROLLER,
+        VOID_GET_BALANCER_BANDWIDTH));
+    return response.getBandwidth();
   }
 
   /**
@@ -363,19 +313,15 @@ public class ClientDatanodeProtocolTranslatorPB implements
   public void submitDiskBalancerPlan(String planID, long planVersion,
         String planFile, String planData, boolean skipDateCheck)
       throws IOException {
-    try {
-      SubmitDiskBalancerPlanRequestProto request =
-          SubmitDiskBalancerPlanRequestProto.newBuilder()
-              .setPlanID(planID)
-              .setPlanVersion(planVersion)
-              .setPlanFile(planFile)
-              .setPlan(planData)
-              .setIgnoreDateCheck(skipDateCheck)
-              .build();
-      rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    SubmitDiskBalancerPlanRequestProto request =
+        SubmitDiskBalancerPlanRequestProto.newBuilder()
+            .setPlanID(planID)
+            .setPlanVersion(planVersion)
+            .setPlanFile(planFile)
+            .setPlan(planData)
+            .setIgnoreDateCheck(skipDateCheck)
+            .build();
+    ipc(() -> rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request));
   }
 
   /**
@@ -387,13 +333,9 @@ public class ClientDatanodeProtocolTranslatorPB implements
   @Override
   public void cancelDiskBalancePlan(String planID)
       throws IOException {
-    try {
-      CancelPlanRequestProto request = CancelPlanRequestProto.newBuilder()
-          .setPlanID(planID).build();
-      rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    CancelPlanRequestProto request = CancelPlanRequestProto.newBuilder()
+        .setPlanID(planID).build();
+    ipc(() -> rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, request));
   }
 
   /**
@@ -401,56 +343,44 @@ public class ClientDatanodeProtocolTranslatorPB implements
    */
   @Override
   public DiskBalancerWorkStatus queryDiskBalancerPlan() throws IOException {
-    try {
-      QueryPlanStatusRequestProto request =
-          QueryPlanStatusRequestProto.newBuilder().build();
-      QueryPlanStatusResponseProto response =
-          rpcProxy.queryDiskBalancerPlan(NULL_CONTROLLER, request);
-      DiskBalancerWorkStatus.Result result = Result.NO_PLAN;
-      if(response.hasResult()) {
-        result = DiskBalancerWorkStatus.Result.values()[
-            response.getResult()];
-      }
-
-      return new DiskBalancerWorkStatus(result,
-          response.hasPlanID() ? response.getPlanID() : null,
-          response.hasPlanFile() ? response.getPlanFile() : null,
-          response.hasCurrentStatus() ? response.getCurrentStatus() : null);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    QueryPlanStatusRequestProto request =
+        QueryPlanStatusRequestProto.newBuilder().build();
+    QueryPlanStatusResponseProto response =
+        ipc(() -> rpcProxy.queryDiskBalancerPlan(NULL_CONTROLLER, request));
+    DiskBalancerWorkStatus.Result result = Result.NO_PLAN;
+    if(response.hasResult()) {
+      result = DiskBalancerWorkStatus.Result.values()[
+          response.getResult()];
     }
+
+    return new DiskBalancerWorkStatus(result,
+        response.hasPlanID() ? response.getPlanID() : null,
+        response.hasPlanFile() ? response.getPlanFile() : null,
+        response.hasCurrentStatus() ? response.getCurrentStatus() : null);
   }
 
   @Override
   public String getDiskBalancerSetting(String key) throws IOException {
-    try {
-      DiskBalancerSettingRequestProto request =
-          DiskBalancerSettingRequestProto.newBuilder().setKey(key).build();
-      DiskBalancerSettingResponseProto response =
-          rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, request);
-      return response.hasValue() ? response.getValue() : null;
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    DiskBalancerSettingRequestProto request =
+        DiskBalancerSettingRequestProto.newBuilder().setKey(key).build();
+    DiskBalancerSettingResponseProto response =
+        ipc(() -> rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, request));
+    return response.hasValue() ? response.getValue() : null;
   }
 
   @Override
   public List<DatanodeVolumeInfo> getVolumeReport() throws IOException {
-    try {
-      List<DatanodeVolumeInfo> volumeInfoList = new ArrayList<>();
-      GetVolumeReportResponseProto volumeReport = rpcProxy.getVolumeReport(
-          NULL_CONTROLLER, VOID_GET_DATANODE_STORAGE_INFO);
-      List<DatanodeVolumeInfoProto> volumeProtoList = volumeReport
-          .getVolumeInfoList();
-      for (DatanodeVolumeInfoProto proto : volumeProtoList) {
-        volumeInfoList.add(new DatanodeVolumeInfo(proto.getPath(), proto
-            .getUsedSpace(), proto.getFreeSpace(), proto.getReservedSpace(),
-            proto.getReservedSpaceForReplicas(), proto.getNumBlocks(),
-            PBHelperClient.convertStorageType(proto.getStorageType())));
-      }
-      return volumeInfoList;
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    List<DatanodeVolumeInfo> volumeInfoList = new ArrayList<>();
+    GetVolumeReportResponseProto volumeReport = ipc(() -> rpcProxy.getVolumeReport(
+        NULL_CONTROLLER, VOID_GET_DATANODE_STORAGE_INFO));
+    List<DatanodeVolumeInfoProto> volumeProtoList = volumeReport
+        .getVolumeInfoList();
+    for (DatanodeVolumeInfoProto proto : volumeProtoList) {
+      volumeInfoList.add(new DatanodeVolumeInfo(proto.getPath(), proto
+          .getUsedSpace(), proto.getFreeSpace(), proto.getReservedSpace(),
+          proto.getReservedSpaceForReplicas(), proto.getNumBlocks(),
+          PBHelperClient.convertStorageType(proto.getStorageType())));
     }
+    return volumeInfoList;
   }
 }
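
Every call site in this file now goes through the statically imported ipc() wrapper (see the import of org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc above). The wrapper itself does not appear in this diff; the following is a minimal sketch of such a helper, with the functional-interface name IpcCall assumed for illustration:

    import java.io.IOException;

    import org.apache.hadoop.thirdparty.protobuf.ServiceException;

    public final class ShadedProtobufHelperSketch {

      /** One protobuf RPC invocation; may raise the shaded ServiceException. */
      @FunctionalInterface
      public interface IpcCall<T> {
        T call() throws ServiceException;
      }

      /** Run the call, converting ServiceException into the IOException it wraps. */
      public static <T> T ipc(IpcCall<T> call) throws IOException {
        try {
          return call.call();
        } catch (ServiceException e) {
          throw getRemoteException(e);
        }
      }

      /** Unwrap a ServiceException: return an IOException cause directly,
       *  otherwise wrap the whole exception in a new IOException. */
      public static IOException getRemoteException(ServiceException e) {
        Throwable cause = e.getCause();
        return (cause instanceof IOException)
            ? (IOException) cause
            : new IOException(e);
      }

      private ShadedProtobufHelperSketch() {
      }
    }

Two consequences are visible in the refactored call sites: request builders are constructed before the lambda, because a Java lambda may only capture effectively final locals (triggerBlockReport() above is the clearest example), and "void" RPCs need no special casing, since the lambda simply returns the response proto, which the caller discards.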

Changes are not shown because the file is too large.
+ 172 - 438
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java


+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java

@@ -207,7 +207,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ChunkedArrayList;
@@ -234,7 +234,7 @@ public class PBHelperClient {
       FsAction.values();
 
   private static ByteString getFixedByteString(String key) {
-    return ProtobufHelper.getFixedByteString(key);
+    return ShadedProtobufHelper.getFixedByteString(key);
   }
 
   /**
@@ -257,7 +257,7 @@
 
   public static ByteString getByteString(byte[] bytes) {
     // return singleton to reduce object allocation
-    return ProtobufHelper.getByteString(bytes);
+    return ShadedProtobufHelper.getByteString(bytes);
   }
 
   public static ShmId convert(ShortCircuitShmIdProto shmId) {
@@ -325,7 +325,7 @@
   }
 
   public static TokenProto convert(Token<?> tok) {
-    return ProtobufHelper.protoFromToken(tok);
+    return ShadedProtobufHelper.protoFromToken(tok);
   }
 
   public static ShortCircuitShmIdProto convert(ShmId shmId) {
@@ -803,8 +803,8 @@
 
   public static Token<BlockTokenIdentifier> convert(
       TokenProto blockToken) {
-    return (Token<BlockTokenIdentifier>) ProtobufHelper
-        .tokenFromProto(blockToken);
+    return (Token<BlockTokenIdentifier>) ShadedProtobufHelper.tokenFromProto(
+        blockToken);
   }
 
   // DatanodeId

+ 11 - 23
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java

@@ -33,7 +33,6 @@ import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.GetRe
 import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.StartReconfigurationRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
@@ -44,7 +43,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -102,37 +102,25 @@ public class ReconfigurationProtocolTranslatorPB implements
 
   @Override
   public void startReconfiguration() throws IOException {
-    try {
-      rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG));
   }
 
   @Override
   public ReconfigurationTaskStatus getReconfigurationStatus()
       throws IOException {
-    try {
-      return ReconfigurationProtocolUtils.getReconfigurationStatus(
-          rpcProxy
-          .getReconfigurationStatus(
-              NULL_CONTROLLER,
-              VOID_GET_RECONFIG_STATUS));
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return ReconfigurationProtocolUtils.getReconfigurationStatus(
+        ipc(() -> rpcProxy
+            .getReconfigurationStatus(
+                NULL_CONTROLLER,
+                VOID_GET_RECONFIG_STATUS)));
   }
 
   @Override
   public List<String> listReconfigurableProperties() throws IOException {
     ListReconfigurablePropertiesResponseProto response;
-    try {
-      response = rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
-          VOID_LIST_RECONFIGURABLE_PROPERTIES);
-      return response.getNameList();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    response = ipc(() -> rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
+        VOID_LIST_RECONFIGURABLE_PROPERTIES));
+    return response.getNameList();
   }
 
   @Override

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml

@@ -151,7 +151,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>compile</scope>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>javax.servlet</groupId>

+ 28 - 14
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolTranslatorPB.java

@@ -97,7 +97,6 @@ import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMou
 import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMountTableEntryResponsePBImpl;
 import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryRequestPBImpl;
 import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryResponsePBImpl;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
@@ -105,6 +104,8 @@ import org.apache.hadoop.ipc.RpcClientUtil;
 
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
+
 /**
  * This class forwards NN's ClientProtocol calls as RPC calls to the NN server
  * while translating from the parameter types used in ClientProtocol to the
@@ -150,7 +151,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.addMountTableEntry(null, proto);
       return new AddMountTableEntryResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -165,7 +167,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.updateMountTableEntry(null, proto);
       return new UpdateMountTableEntryResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -180,7 +183,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.removeMountTableEntry(null, proto);
       return new RemoveMountTableEntryResponsePBImpl(responseProto);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -195,7 +199,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.getMountTableEntries(null, proto);
       return new GetMountTableEntriesResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -209,7 +214,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.enterSafeMode(null, proto);
       return new EnterSafeModeResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -223,7 +229,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.leaveSafeMode(null, proto);
       return new LeaveSafeModeResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -237,7 +244,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.getSafeMode(null, proto);
       return new GetSafeModeResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -252,7 +260,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.disableNameservice(null, proto);
       return new DisableNameserviceResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -267,7 +276,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.enableNameservice(null, proto);
       return new EnableNameserviceResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -281,7 +291,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.getDisabledNameservices(null, proto);
       return new GetDisabledNameservicesResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -296,7 +307,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.refreshMountTableEntries(null, proto);
       return new RefreshMountTableEntriesResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -311,7 +323,8 @@ public class RouterAdminProtocolTranslatorPB
           rpcProxy.getDestination(null, proto);
       return new GetDestinationResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 
@@ -325,7 +338,8 @@ public class RouterAdminProtocolTranslatorPB
       return new RefreshSuperUserGroupsConfigurationResponsePBImpl(response)
           .getStatus();
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
+
+      throw new IOException(getRemoteException(e).getMessage());
     }
   }
 }
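
Unlike the translators elsewhere in this commit, this file keeps its explicit try/catch blocks and switches only to the statically imported getRemoteException(). The reason is visible in the handler bodies: each one discards the remote exception's type and wraps just its message in a fresh IOException, whereas ipc() rethrows the unwrapped exception as-is, so a mechanical conversion would change what callers observe. An illustrative fragment of the two behaviours (assuming the ipc()/getRemoteException sketch given earlier; surrounding declarations omitted):

    // adopted elsewhere: the unwrapped remote exception keeps its
    // original type and stack trace
    GetMountTableEntriesResponseProto response =
        ipc(() -> rpcProxy.getMountTableEntries(null, proto));

    // retained in this file: only the message text survives; the type
    // and stack of the remote exception are dropped
    try {
      response = rpcProxy.getMountTableEntries(null, proto);
    } catch (ServiceException e) {
      throw new IOException(getRemoteException(e).getMessage());
    }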

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterAdminServer.java

@@ -194,9 +194,9 @@ public class RouterAdminServer extends AbstractService
         RefreshCallQueueProtocolProtos.RefreshCallQueueProtocolService.
         newReflectiveBlockingService(refreshCallQueueXlator);
 
-    DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, GenericRefreshProtocolPB.class,
         genericRefreshService, adminServer);
-    DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, RefreshCallQueueProtocolPB.class,
         refreshCallQueueService, adminServer);
   }
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java

@@ -305,11 +305,11 @@ public class RouterRpcServer extends AbstractService implements ClientProtocol,
         .build();
 
     // Add all the RPC protocols that the Router implements
-    DFSUtil.addPBProtocol(
+    DFSUtil.addInternalPBProtocol(
         conf, NamenodeProtocolPB.class, nnPbService, this.rpcServer);
-    DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
         refreshUserMappingService, this.rpcServer);
-    DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, GetUserMappingsProtocolPB.class,
         getUserMappingService, this.rpcServer);
 
     // Set service-level authorization security policy

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MockNamenode.java

@@ -199,7 +199,7 @@ public class MockNamenode {
     BlockingService nnProtoPbService =
         NamenodeProtocolService.newReflectiveBlockingService(
             nnProtoXlator);
-    DFSUtil.addPBProtocol(
+    DFSUtil.addInternalPBProtocol(
         conf, NamenodeProtocolPB.class, nnProtoPbService, rpcServer);
 
     DatanodeProtocolServerSideTranslatorPB dnProtoPbXlator =
@@ -207,7 +207,7 @@ public class MockNamenode {
     BlockingService dnProtoPbService =
         DatanodeProtocolService.newReflectiveBlockingService(
             dnProtoPbXlator);
-    DFSUtil.addPBProtocol(
+    DFSUtil.addInternalPBProtocol(
         conf, DatanodeProtocolPB.class, dnProtoPbService, rpcServer);
 
     HAServiceProtocolServerSideTranslatorPB haServiceProtoXlator =
@@ -215,7 +215,7 @@ public class MockNamenode {
     BlockingService haProtoPbService =
         HAServiceProtocolService.newReflectiveBlockingService(
             haServiceProtoXlator);
-    DFSUtil.addPBProtocol(
+    DFSUtil.addInternalPBProtocol(
         conf, HAServiceProtocolPB.class, haProtoPbService, rpcServer);
 
     this.rpcServer.addTerseExceptions(

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/pom.xml

@@ -135,7 +135,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>compile</scope>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>javax.servlet</groupId>

+ 28 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java

@@ -67,6 +67,7 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
@@ -1312,7 +1313,30 @@ public class DFSUtil {
   }
 
   /**
-   * Add protobuf based protocol to the {@link org.apache.hadoop.ipc.RPC.Server}
+   * Add protobuf based protocol to the {@link org.apache.hadoop.ipc.RPC.Server}.
+   * This method is for exclusive use by the hadoop libraries, as its signature
+   * changes with the version of the shaded protobuf library it has been built with.
+   * @param conf configuration
+   * @param protocol Protocol interface
+   * @param service service that implements the protocol
+   * @param server RPC server to which the protocol &amp; implementation is
+   *               added to
+   * @throws IOException failure
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static void addInternalPBProtocol(Configuration conf,
+      Class<?> protocol,
+      BlockingService service,
+      RPC.Server server) throws IOException {
+    RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class);
+    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
+  }
+
+  /**
+   * Add protobuf based protocol to the {@link org.apache.hadoop.ipc.RPC.Server}.
+   * Deprecated as it will only reliably compile if an unshaded protobuf library
+   * is also on the classpath.
    * @param conf configuration
    * @param protocol Protocol interface
    * @param service service that implements the protocol
@@ -1320,17 +1344,17 @@ public class DFSUtil {
    *               added to
    * @throws IOException
    */
+  @Deprecated
   public static void addPBProtocol(Configuration conf, Class<?> protocol,
       BlockingService service, RPC.Server server) throws IOException {
-    RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class);
-    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
+    addInternalPBProtocol(conf, protocol, service, server);
   }
 
   /**
    * Add protobuf based protocol to the {@link RPC.Server}.
    * This engine uses Protobuf 2.5.0. Recommended to upgrade to
    * Protobuf 3.x from hadoop-thirdparty and use
-   * {@link DFSUtil#addPBProtocol(Configuration, Class, BlockingService,
+   * {@link DFSUtil#addInternalPBProtocol(Configuration, Class, BlockingService,
    * RPC.Server)}.
    * @param conf configuration
    * @param protocol Protocol interface
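
addInternalPBProtocol() carries the old method body unchanged, and the deprecated addPBProtocol() now simply delegates to it, so existing callers keep working while hadoop-internal code migrates. A hedged usage sketch, following the call sites updated in RouterRpcServer and MockNamenode above (the namenode and rpcServer variables, and the translator class name, are assumptions following the pattern in those hunks):

    // Wrap the implementation in its PB translator, expose it as a
    // BlockingService, then register it on the RPC server.
    NamenodeProtocolServerSideTranslatorPB translator =
        new NamenodeProtocolServerSideTranslatorPB(namenode);
    BlockingService service =
        NamenodeProtocolService.newReflectiveBlockingService(translator);
    DFSUtil.addInternalPBProtocol(
        conf, NamenodeProtocolPB.class, service, rpcServer);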

+ 3 - 7
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java

@@ -29,7 +29,6 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeLifelineProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
@@ -38,7 +37,8 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -96,11 +96,7 @@ public class DatanodeLifelineProtocolClientSideTranslatorPB implements
       builder.setVolumeFailureSummary(PBHelper.convertVolumeFailureSummary(
           volumeFailureSummary));
     }
-    try {
-      rpcProxy.sendLifeline(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.sendLifeline(NULL_CONTROLLER, builder.build()));
   }
 
   @Override // ProtocolMetaInterface

+ 14 - 48
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java

@@ -61,7 +61,6 @@ import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
@@ -71,10 +70,11 @@ import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
 import javax.annotation.Nonnull;
 
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
+
 /**
  * This class is the client side translator to translate the requests made on
  * {@link DatanodeProtocol} interfaces to the RPC server implementing
@@ -123,11 +123,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     RegisterDatanodeRequestProto.Builder builder = RegisterDatanodeRequestProto
         .newBuilder().setRegistration(PBHelper.convert(registration));
     RegisterDatanodeResponseProto resp;
-    try {
-      resp = rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build()));
+
     return PBHelper.convert(resp.getRegistration());
   }
 
@@ -164,11 +161,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     }
 
     HeartbeatResponseProto resp;
-    try {
-      resp = rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build()));
+
     DatanodeCommand[] cmds = new DatanodeCommand[resp.getCmdsList().size()];
     int index = 0;
     for (DatanodeCommandProto p : resp.getCmdsList()) {
@@ -214,11 +208,7 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     }
     builder.setContext(PBHelper.convert(context));
     BlockReportResponseProto resp;
-    try {
-      resp = rpcProxy.blockReport(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> rpcProxy.blockReport(NULL_CONTROLLER, builder.build()));
     return resp.hasCmd() ? PBHelper.convert(resp.getCmd()) : null;
   }
 
@@ -234,11 +224,7 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     }
     
     CacheReportResponseProto resp;
-    try {
-      resp = rpcProxy.cacheReport(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> rpcProxy.cacheReport(NULL_CONTROLLER, builder.build()));
     if (resp.hasCmd()) {
       return PBHelper.convert(resp.getCmd());
     }
@@ -263,11 +249,7 @@ public class DatanodeProtocolClientSideTranslatorPB implements
       }
       builder.addBlocks(repBuilder.build());
     }
-    try {
-      rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build()));
   }
 
   @Override
@@ -276,21 +258,13 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     ErrorReportRequestProto req = ErrorReportRequestProto.newBuilder()
         .setRegistartion(PBHelper.convert(registration))
         .setErrorCode(errorCode).setMsg(msg).build();
-    try {
-      rpcProxy.errorReport(NULL_CONTROLLER, req);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.errorReport(NULL_CONTROLLER, req));
   }
 
   @Override
   public NamespaceInfo versionRequest() throws IOException {
-    try {
-      return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
-          VOID_VERSION_REQUEST).getInfo());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return PBHelper.convert(ipc(() -> rpcProxy.versionRequest(NULL_CONTROLLER,
+        VOID_VERSION_REQUEST).getInfo()));
   }
 
   @Override
@@ -301,11 +275,7 @@ public class DatanodeProtocolClientSideTranslatorPB implements
       builder.addBlocks(i, PBHelperClient.convertLocatedBlock(blocks[i]));
     }
     ReportBadBlocksRequestProto req = builder.build();
-    try {
-      rpcProxy.reportBadBlocks(NULL_CONTROLLER, req);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.reportBadBlocks(NULL_CONTROLLER, req));
   }
 
   @Override
@@ -326,11 +296,7 @@ public class DatanodeProtocolClientSideTranslatorPB implements
       builder.addNewTargetStorages(newtargetstorages[i]);
     }
     CommitBlockSynchronizationRequestProto req = builder.build();
-    try {
-      rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req));
   }
 
   @Override // ProtocolMetaInterface

+ 30 - 48
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java

@@ -16,7 +16,6 @@
  */
 package org.apache.hadoop.hdfs.protocolPB;
 
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,7 +29,6 @@ import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMapProtocol;
 import org.apache.hadoop.hdfs.server.common.FileRegion;
 import org.apache.hadoop.hdfs.server.namenode.ha.AbstractNNFailoverProxyProvider;
 import org.apache.hadoop.hdfs.server.namenode.ha.InMemoryAliasMapFailoverProxyProvider;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
 import org.slf4j.Logger;
@@ -54,6 +52,7 @@ import static org.apache.hadoop.hdfs.DFSUtilClient.getNameServiceIds;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Failover.PROXY_PROVIDER_KEY_PREFIX;
 import static org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.*;
 import static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.*;
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate requests made to the
@@ -136,29 +135,24 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
       builder.setMarker(PBHelperClient.convert(marker.get()));
     }
     ListRequestProto request = builder.build();
-    try {
-      ListResponseProto response = rpcProxy.list(null, request);
-      List<KeyValueProto> fileRegionsList = response.getFileRegionsList();
-
-      List<FileRegion> fileRegions = fileRegionsList
-          .stream()
-          .map(kv -> new FileRegion(
-              PBHelperClient.convert(kv.getKey()),
-              PBHelperClient.convert(kv.getValue())
-          ))
-          .collect(Collectors.toList());
-      BlockProto nextMarker = response.getNextMarker();
-
-      if (nextMarker.isInitialized()) {
-        return new InMemoryAliasMap.IterationResult(fileRegions,
-            Optional.of(PBHelperClient.convert(nextMarker)));
-      } else {
-        return new InMemoryAliasMap.IterationResult(fileRegions,
-            Optional.empty());
-      }
-
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    ListResponseProto response = ipc(() -> rpcProxy.list(null, request));
+    List<KeyValueProto> fileRegionsList = response.getFileRegionsList();
+
+    List<FileRegion> fileRegions = fileRegionsList
+        .stream()
+        .map(kv -> new FileRegion(
+            PBHelperClient.convert(kv.getKey()),
+            PBHelperClient.convert(kv.getValue())
+        ))
+        .collect(Collectors.toList());
+    BlockProto nextMarker = response.getNextMarker();
+
+    if (nextMarker.isInitialized()) {
+      return new InMemoryAliasMap.IterationResult(fileRegions,
+          Optional.of(PBHelperClient.convert(nextMarker)));
+    } else {
+      return new InMemoryAliasMap.IterationResult(fileRegions,
+          Optional.empty());
     }
   }
 
@@ -175,19 +169,15 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
             .newBuilder()
             .setKey(PBHelperClient.convert(block))
             .build();
-    try {
-      ReadResponseProto response = rpcProxy.read(null, request);
-
-      ProvidedStorageLocationProto providedStorageLocation =
-          response.getValue();
-      if (providedStorageLocation.isInitialized()) {
-        return Optional.of(PBHelperClient.convert(providedStorageLocation));
-      }
-      return Optional.empty();
+    ReadResponseProto response = ipc(() -> rpcProxy.read(null, request));
 
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    ProvidedStorageLocationProto providedStorageLocation =
+        response.getValue();
+    if (providedStorageLocation.isInitialized()) {
+      return Optional.of(PBHelperClient.convert(providedStorageLocation));
     }
+    return Optional.empty();
+
   }
 
   @Override
@@ -206,22 +196,14 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
                 .build())
             .build();
 
-    try {
-      rpcProxy.write(null, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.write(null, request));
   }
 
   @Override
   public String getBlockPoolId() throws IOException {
-    try {
-      BlockPoolResponseProto response = rpcProxy.getBlockPoolId(null,
-          BlockPoolRequestProto.newBuilder().build());
-      return response.getBlockPoolId();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    BlockPoolResponseProto response = ipc(() -> rpcProxy.getBlockPoolId(null,
+        BlockPoolRequestProto.newBuilder().build()));
+    return response.getBlockPoolId();
   }
 
   @Override

+ 6 - 13
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java

@@ -34,7 +34,6 @@ import org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.UpdateR
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
@@ -42,7 +41,8 @@ import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -79,11 +79,7 @@ public class InterDatanodeProtocolTranslatorPB implements
     InitReplicaRecoveryRequestProto req = InitReplicaRecoveryRequestProto
         .newBuilder().setBlock(PBHelper.convert(rBlock)).build();
     InitReplicaRecoveryResponseProto resp;
-    try {
-      resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    resp = ipc(() -> rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req));
     if (!resp.getReplicaFound()) {
       // No replica found on the remote node.
       return null;
@@ -108,12 +104,9 @@ public class InterDatanodeProtocolTranslatorPB implements
         .setBlock(PBHelperClient.convert(oldBlock))
         .setNewLength(newLength).setNewBlockId(newBlockId)
         .setRecoveryId(recoveryId).build();
-    try {
-      return rpcProxy.updateReplicaUnderRecovery(NULL_CONTROLLER, req
-          ).getStorageUuid();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return ipc(() -> rpcProxy.updateReplicaUnderRecovery(NULL_CONTROLLER, req)
+        .getStorageUuid());
+
   }
 
   @Override

+ 14 - 27
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java

@@ -29,13 +29,12 @@ import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.StartLogSegme
 import org.apache.hadoop.hdfs.server.protocol.FenceResponse;
 import org.apache.hadoop.hdfs.server.protocol.JournalInfo;
 import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -69,11 +68,17 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setNumTxns(numTxns)
         .setRecords(PBHelperClient.getByteString(records))
         .build();
-    try {
-      rpcProxy.journal(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.journal(NULL_CONTROLLER, req));
+  }
+
+  @Override
+  public FenceResponse fence(JournalInfo journalInfo, long epoch,
+      String fencerInfo) throws IOException {
+    FenceRequestProto req = FenceRequestProto.newBuilder().setEpoch(epoch)
+        .setJournalInfo(PBHelper.convert(journalInfo)).build();
+    FenceResponseProto resp = ipc(() -> rpcProxy.fence(NULL_CONTROLLER, req));
+    return new FenceResponse(resp.getPreviousEpoch(),
+        resp.getLastTransactionId(), resp.getInSync());
   }
 
   @Override
@@ -84,25 +89,7 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setEpoch(epoch)
         .setTxid(txid)
         .build();
-    try {
-      rpcProxy.startLogSegment(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
-  }
-  
-  @Override
-  public FenceResponse fence(JournalInfo journalInfo, long epoch,
-      String fencerInfo) throws IOException {
-    FenceRequestProto req = FenceRequestProto.newBuilder().setEpoch(epoch)
-        .setJournalInfo(PBHelper.convert(journalInfo)).build();
-    try {
-      FenceResponseProto resp = rpcProxy.fence(NULL_CONTROLLER, req);
-      return new FenceResponse(resp.getPreviousEpoch(),
-          resp.getLastTransactionId(), resp.getInSync());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.startLogSegment(NULL_CONTROLLER, req));
   }
 
   @Override

+ 32 - 89
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java

@@ -51,14 +51,13 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -107,63 +106,39 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
     GetBlocksRequestProto req = GetBlocksRequestProto.newBuilder()
         .setDatanode(PBHelperClient.convert((DatanodeID)datanode)).setSize(size)
         .setMinBlockSize(minBlockSize).build();
-    try {
-      return PBHelper.convert(rpcProxy.getBlocks(NULL_CONTROLLER, req)
-          .getBlocks());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return PBHelper.convert(ipc(() -> rpcProxy.getBlocks(NULL_CONTROLLER, req)
+        .getBlocks()));
   }
 
   @Override
   public ExportedBlockKeys getBlockKeys() throws IOException {
-    try {
-      GetBlockKeysResponseProto rsp = rpcProxy.getBlockKeys(NULL_CONTROLLER,
-          VOID_GET_BLOCKKEYS_REQUEST);
-      return rsp.hasKeys() ? PBHelper.convert(rsp.getKeys()) : null;
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    GetBlockKeysResponseProto rsp = ipc(() -> rpcProxy.getBlockKeys(NULL_CONTROLLER,
+        VOID_GET_BLOCKKEYS_REQUEST));
+    return rsp.hasKeys() ? PBHelper.convert(rsp.getKeys()) : null;
   }
 
   @Override
   public long getTransactionID() throws IOException {
-    try {
-      return rpcProxy.getTransactionId(NULL_CONTROLLER,
-          VOID_GET_TRANSACTIONID_REQUEST).getTxId();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return ipc(() -> rpcProxy.getTransactionId(NULL_CONTROLLER,
+        VOID_GET_TRANSACTIONID_REQUEST).getTxId());
   }
 
   @Override
   public long getMostRecentCheckpointTxId() throws IOException {
-    try {
-      return rpcProxy.getMostRecentCheckpointTxId(NULL_CONTROLLER,
-          GetMostRecentCheckpointTxIdRequestProto.getDefaultInstance()).getTxId();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return ipc(() -> rpcProxy.getMostRecentCheckpointTxId(NULL_CONTROLLER,
+        GetMostRecentCheckpointTxIdRequestProto.getDefaultInstance()).getTxId());
   }
 
   @Override
   public CheckpointSignature rollEditLog() throws IOException {
-    try {
-      return PBHelper.convert(rpcProxy.rollEditLog(NULL_CONTROLLER,
-          VOID_ROLL_EDITLOG_REQUEST).getSignature());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return PBHelper.convert(ipc(() -> rpcProxy.rollEditLog(NULL_CONTROLLER,
+        VOID_ROLL_EDITLOG_REQUEST).getSignature()));
   }
 
   @Override
   public NamespaceInfo versionRequest() throws IOException {
-    try {
-      return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
-          VOID_VERSION_REQUEST).getInfo());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return PBHelper.convert(ipc(() -> rpcProxy.versionRequest(NULL_CONTROLLER,
+        VOID_VERSION_REQUEST).getInfo()));
   }
 
   @Override
@@ -172,11 +147,7 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
     ErrorReportRequestProto req = ErrorReportRequestProto.newBuilder()
         .setErrorCode(errorCode).setMsg(msg)
         .setRegistration(PBHelper.convert(registration)).build();
-    try {
-      rpcProxy.errorReport(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.errorReport(NULL_CONTROLLER, req));
   }
 
   @Override
@@ -184,13 +155,9 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
       NamenodeRegistration registration) throws IOException {
     RegisterRequestProto req = RegisterRequestProto.newBuilder()
         .setRegistration(PBHelper.convert(registration)).build();
-    try {
-      return PBHelper.convert(
-          rpcProxy.registerSubordinateNamenode(NULL_CONTROLLER, req)
-          .getRegistration());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return PBHelper.convert(
+        ipc(() -> rpcProxy.registerSubordinateNamenode(NULL_CONTROLLER, req)
+            .getRegistration()));
   }
 
   @Override
@@ -199,11 +166,7 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
     StartCheckpointRequestProto req = StartCheckpointRequestProto.newBuilder()
         .setRegistration(PBHelper.convert(registration)).build();
     NamenodeCommandProto cmd;
-    try {
-      cmd = rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    cmd = ipc(() -> rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand());
     return PBHelper.convert(cmd);
   }
 
@@ -213,11 +176,7 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
     EndCheckpointRequestProto req = EndCheckpointRequestProto.newBuilder()
         .setRegistration(PBHelper.convert(registration))
         .setSignature(PBHelper.convert(sig)).build();
-    try {
-      rpcProxy.endCheckpoint(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.endCheckpoint(NULL_CONTROLLER, req));
   }
 
   @Override
@@ -225,12 +184,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
       throws IOException {
     GetEditLogManifestRequestProto req = GetEditLogManifestRequestProto
         .newBuilder().setSinceTxId(sinceTxId).build();
-    try {
-      return PBHelper.convert(rpcProxy.getEditLogManifest(NULL_CONTROLLER, req)
-          .getManifest());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    return PBHelper.convert(ipc(() -> rpcProxy.getEditLogManifest(NULL_CONTROLLER, req)
+        .getManifest()));
   }
 
   @Override
@@ -244,38 +199,26 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
   public boolean isUpgradeFinalized() throws IOException {
     IsUpgradeFinalizedRequestProto req = IsUpgradeFinalizedRequestProto
         .newBuilder().build();
-    try {
-      IsUpgradeFinalizedResponseProto response = rpcProxy.isUpgradeFinalized(
-          NULL_CONTROLLER, req);
-      return response.getIsUpgradeFinalized();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    IsUpgradeFinalizedResponseProto response = ipc(() -> rpcProxy.isUpgradeFinalized(
+        NULL_CONTROLLER, req));
+    return response.getIsUpgradeFinalized();
   }
 
   @Override
   public boolean isRollingUpgrade() throws IOException {
     IsRollingUpgradeRequestProto req = IsRollingUpgradeRequestProto
         .newBuilder().build();
-    try {
-      IsRollingUpgradeResponseProto response = rpcProxy.isRollingUpgrade(
-          NULL_CONTROLLER, req);
-      return response.getIsRollingUpgrade();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    IsRollingUpgradeResponseProto response = ipc(() -> rpcProxy.isRollingUpgrade(
+        NULL_CONTROLLER, req));
+    return response.getIsRollingUpgrade();
   }
 
   @Override
   public Long getNextSPSPath() throws IOException {
     GetNextSPSPathRequestProto req =
         GetNextSPSPathRequestProto.newBuilder().build();
-    try {
-      GetNextSPSPathResponseProto nextSPSPath =
-          rpcProxy.getNextSPSPath(NULL_CONTROLLER, req);
-      return nextSPSPath.hasSpsPath() ? nextSPSPath.getSpsPath() : null;
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    GetNextSPSPathResponseProto nextSPSPath =
+        ipc(() -> rpcProxy.getNextSPSPath(NULL_CONTROLLER, req));
+    return nextSPSPath.hasSpsPath() ? nextSPSPath.getSpsPath() : null;
   }
 }
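
One detail worth noting in this file: proto-chained getters such as .getTxId() or .getCommand() sit inside the lambda, while PBHelper.convert(...) is applied outside it, or wrapped around the whole ipc() result. A plausible reason is checked exceptions: the lambda passed to ipc() can presumably throw only the shaded ServiceException, so any converter that itself declares IOException cannot move inside. Illustrative fragments from the shapes above:

    // getters on the response proto are safe inside the lambda
    long txId = ipc(() -> rpcProxy.getTransactionId(NULL_CONTROLLER,
        VOID_GET_TRANSACTIONID_REQUEST).getTxId());

    // conversion happens outside, on the returned proto
    NamenodeCommandProto cmd =
        ipc(() -> rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand());
    return PBHelper.convert(cmd);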

+ 11 - 16
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolTranslatorPB.java

@@ -20,14 +20,12 @@
 package org.apache.hadoop.hdfs.qjournal.protocolPB;
 
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.qjournal.protocol.InterQJournalProtocol;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestRequestProto;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestResponseProto;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
@@ -35,6 +33,8 @@ import org.apache.hadoop.ipc.RpcClientUtil;
 import java.io.Closeable;
 import java.io.IOException;
 
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
+
 /**
  * This class is the client side translator to translate the requests made on
  * {@link InterQJournalProtocol} interfaces to the RPC server implementing
@@ -63,21 +63,16 @@ public class InterQJournalProtocolTranslatorPB implements ProtocolMetaInterface,
   public GetEditLogManifestResponseProto getEditLogManifestFromJournal(
       String jid, String nameServiceId, long sinceTxId, boolean inProgressOk)
       throws IOException {
-    try {
-      GetEditLogManifestRequestProto.Builder req;
-      req = GetEditLogManifestRequestProto.newBuilder()
-          .setJid(convertJournalId(jid))
-          .setSinceTxId(sinceTxId)
-          .setInProgressOk(inProgressOk);
-      if (nameServiceId !=null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      return rpcProxy.getEditLogManifestFromJournal(NULL_CONTROLLER,
-          req.build()
-      );
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    GetEditLogManifestRequestProto.Builder req;
+    req = GetEditLogManifestRequestProto.newBuilder()
+        .setJid(convertJournalId(jid))
+        .setSinceTxId(sinceTxId)
+        .setInProgressOk(inProgressOk);
+    if (nameServiceId !=null) {
+      req.setNameServiceId(nameServiceId);
     }
+    return ipc(() -> rpcProxy.getEditLogManifestFromJournal(NULL_CONTROLLER,
+        req.build()));
   }
 
   private QJournalProtocolProtos.JournalIdProto convertJournalId(String jid) {

+ 104 - 187
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java

@@ -63,13 +63,12 @@ import org.apache.hadoop.hdfs.qjournal.protocol.RequestInfo;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -97,36 +96,28 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
   @Override
   public boolean isFormatted(String journalId,
                              String nameServiceId) throws IOException {
-    try {
-      IsFormattedRequestProto.Builder req = IsFormattedRequestProto.newBuilder()
-          .setJid(convertJournalId(journalId));
-      if (nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-
-      IsFormattedResponseProto resp = rpcProxy.isFormatted(
-          NULL_CONTROLLER, req.build());
-      return resp.getIsFormatted();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    IsFormattedRequestProto.Builder req = IsFormattedRequestProto.newBuilder()
+        .setJid(convertJournalId(journalId));
+    if (nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+
+    IsFormattedResponseProto resp = ipc(() -> rpcProxy.isFormatted(
+        NULL_CONTROLLER, req.build()));
+    return resp.getIsFormatted();
   }
 
   @Override
   public GetJournalStateResponseProto getJournalState(String jid,
                                                       String nameServiceId)
       throws IOException {
-    try {
-      GetJournalStateRequestProto.Builder req = GetJournalStateRequestProto
-          .newBuilder()
-          .setJid(convertJournalId(jid));
-      if (nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      return rpcProxy.getJournalState(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    GetJournalStateRequestProto.Builder req = GetJournalStateRequestProto
+        .newBuilder()
+        .setJid(convertJournalId(jid));
+    if (nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+    return ipc(() -> rpcProxy.getJournalState(NULL_CONTROLLER, req.build()));
   }
 
   private JournalIdProto convertJournalId(String jid) {
@@ -140,19 +131,15 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
                      String nameServiceId,
                      NamespaceInfo nsInfo,
                      boolean force) throws IOException {
-    try {
-      FormatRequestProto.Builder req = FormatRequestProto.newBuilder()
-          .setJid(convertJournalId(jid))
-          .setNsInfo(PBHelper.convert(nsInfo))
-          .setForce(force);
-      if(nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-
-      rpcProxy.format(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    FormatRequestProto.Builder req = FormatRequestProto.newBuilder()
+        .setJid(convertJournalId(jid))
+        .setNsInfo(PBHelper.convert(nsInfo))
+        .setForce(force);
+    if(nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+
+    ipc(() -> rpcProxy.format(NULL_CONTROLLER, req.build()));
   }
 
   @Override
@@ -160,20 +147,16 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
                                         String nameServiceId,
                                         NamespaceInfo nsInfo,
                                         long epoch) throws IOException {
-    try {
-      NewEpochRequestProto.Builder req = NewEpochRequestProto.newBuilder()
-          .setJid(convertJournalId(jid))
-          .setNsInfo(PBHelper.convert(nsInfo))
-          .setEpoch(epoch);
-
-      if(nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
+    NewEpochRequestProto.Builder req = NewEpochRequestProto.newBuilder()
+        .setJid(convertJournalId(jid))
+        .setNsInfo(PBHelper.convert(nsInfo))
+        .setEpoch(epoch);
 
-      return rpcProxy.newEpoch(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    if(nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+
+    return ipc(() -> rpcProxy.newEpoch(NULL_CONTROLLER, req.build()));
   }
 
   @Override
@@ -187,22 +170,14 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setNumTxns(numTxns)
         .setRecords(PBHelperClient.getByteString(records))
         .build();
-    try {
-      rpcProxy.journal(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.journal(NULL_CONTROLLER, req));
   }
   
   @Override
   public void heartbeat(RequestInfo reqInfo) throws IOException {
-    try {
-      rpcProxy.heartbeat(NULL_CONTROLLER, HeartbeatRequestProto.newBuilder()
-            .setReqInfo(convert(reqInfo))
-            .build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.heartbeat(NULL_CONTROLLER, HeartbeatRequestProto.newBuilder()
+        .setReqInfo(convert(reqInfo))
+        .build()));
   }
 
   private QJournalProtocolProtos.RequestInfoProto convert(
@@ -227,11 +202,7 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setReqInfo(convert(reqInfo))
         .setTxid(txid).setLayoutVersion(layoutVersion)
         .build();
-    try {
-      rpcProxy.startLogSegment(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.startLogSegment(NULL_CONTROLLER, req));
   }
   
   @Override
@@ -243,11 +214,7 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setStartTxId(startTxId)
         .setEndTxId(endTxId)
         .build();
-    try {
-      rpcProxy.finalizeLogSegment(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.finalizeLogSegment(NULL_CONTROLLER, req));
   }
   
   @Override
@@ -257,79 +224,58 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setReqInfo(convert(reqInfo))
         .setMinTxIdToKeep(minTxIdToKeep)
         .build();
-    try {
-      rpcProxy.purgeLogs(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.purgeLogs(NULL_CONTROLLER, req));
   }
 
   @Override
   public GetEditLogManifestResponseProto getEditLogManifest(
       String jid, String nameServiceId,
        long sinceTxId, boolean inProgressOk) throws IOException {
-    try {
-      GetEditLogManifestRequestProto.Builder req;
-      req = GetEditLogManifestRequestProto.newBuilder()
-          .setJid(convertJournalId(jid))
-          .setSinceTxId(sinceTxId)
-          .setInProgressOk(inProgressOk);
-      if (nameServiceId !=null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      return rpcProxy.getEditLogManifest(NULL_CONTROLLER,
-          req.build()
-          );
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    GetEditLogManifestRequestProto.Builder req;
+    req = GetEditLogManifestRequestProto.newBuilder()
+        .setJid(convertJournalId(jid))
+        .setSinceTxId(sinceTxId)
+        .setInProgressOk(inProgressOk);
+    if (nameServiceId !=null) {
+      req.setNameServiceId(nameServiceId);
     }
+    return ipc(() -> rpcProxy.getEditLogManifest(NULL_CONTROLLER,
+        req.build()));
   }
 
   @Override
   public GetJournaledEditsResponseProto getJournaledEdits(String jid,
       String nameServiceId, long sinceTxId, int maxTxns) throws IOException {
-    try {
-      GetJournaledEditsRequestProto.Builder req =
-          GetJournaledEditsRequestProto.newBuilder()
-              .setJid(convertJournalId(jid))
-              .setSinceTxId(sinceTxId)
-              .setMaxTxns(maxTxns);
-      if (nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      return rpcProxy.getJournaledEdits(NULL_CONTROLLER, req.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
+    GetJournaledEditsRequestProto.Builder req =
+        GetJournaledEditsRequestProto.newBuilder()
+            .setJid(convertJournalId(jid))
+            .setSinceTxId(sinceTxId)
+            .setMaxTxns(maxTxns);
+    if (nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+    return ipc(() -> rpcProxy.getJournaledEdits(NULL_CONTROLLER, req.build()));
   }
 
   @Override
   public PrepareRecoveryResponseProto prepareRecovery(RequestInfo reqInfo,
       long segmentTxId) throws IOException {
-    try {
-      return rpcProxy.prepareRecovery(NULL_CONTROLLER,
-          PrepareRecoveryRequestProto.newBuilder()
+    return ipc(() -> rpcProxy.prepareRecovery(NULL_CONTROLLER,
+        PrepareRecoveryRequestProto.newBuilder()
             .setReqInfo(convert(reqInfo))
             .setSegmentTxId(segmentTxId)
-            .build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+            .build()));
   }
 
   @Override
   public void acceptRecovery(RequestInfo reqInfo,
       SegmentStateProto stateToAccept, URL fromUrl) throws IOException {
-    try {
-      rpcProxy.acceptRecovery(NULL_CONTROLLER,
-          AcceptRecoveryRequestProto.newBuilder()
+    ipc(() -> rpcProxy.acceptRecovery(NULL_CONTROLLER,
+        AcceptRecoveryRequestProto.newBuilder()
             .setReqInfo(convert(reqInfo))
             .setStateToAccept(stateToAccept)
             .setFromURL(fromUrl.toExternalForm())
-            .build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+            .build()));
   }
 
   public boolean isMethodSupported(String methodName) throws IOException {
@@ -340,42 +286,30 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
 
   @Override
   public void doPreUpgrade(String jid) throws IOException {
-    try {
-      DoPreUpgradeRequestProto.Builder req;
-      req = DoPreUpgradeRequestProto.newBuilder()
-          .setJid(convertJournalId(jid));
-      rpcProxy.doPreUpgrade(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    DoPreUpgradeRequestProto.Builder req;
+    req = DoPreUpgradeRequestProto.newBuilder()
+        .setJid(convertJournalId(jid));
+    ipc(() -> rpcProxy.doPreUpgrade(NULL_CONTROLLER, req.build()));
   }
 
   @Override
   public void doUpgrade(String journalId, StorageInfo sInfo) throws IOException {
-    try {
-      rpcProxy.doUpgrade(NULL_CONTROLLER,
-          DoUpgradeRequestProto.newBuilder()
+    ipc(() -> rpcProxy.doUpgrade(NULL_CONTROLLER,
+        DoUpgradeRequestProto.newBuilder()
             .setJid(convertJournalId(journalId))
             .setSInfo(PBHelper.convert(sInfo))
-            .build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+            .build()));
   }
   
   @Override
   public void doFinalize(String jid, String nameServiceId) throws IOException {
-    try {
-      DoFinalizeRequestProto.Builder req = DoFinalizeRequestProto
-          .newBuilder()
-          .setJid(convertJournalId(jid));
-      if (nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      rpcProxy.doFinalize(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    DoFinalizeRequestProto.Builder req = DoFinalizeRequestProto
+        .newBuilder()
+        .setJid(convertJournalId(jid));
+    if (nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+    ipc(() -> rpcProxy.doFinalize(NULL_CONTROLLER, req.build()));
   }
 
   @Override
@@ -384,37 +318,29 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
                              StorageInfo storage,
                              StorageInfo prevStorage,
                              int targetLayoutVersion) throws IOException {
-    try {
-      CanRollBackRequestProto.Builder req = CanRollBackRequestProto.newBuilder()
-          .setJid(convertJournalId(journalId))
-          .setStorage(PBHelper.convert(storage))
-          .setPrevStorage(PBHelper.convert(prevStorage))
-          .setTargetLayoutVersion(targetLayoutVersion);
-      if (nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      CanRollBackResponseProto response = rpcProxy.canRollBack(
-          NULL_CONTROLLER, req.build());
-      return response.getCanRollBack();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    CanRollBackRequestProto.Builder req = CanRollBackRequestProto.newBuilder()
+        .setJid(convertJournalId(journalId))
+        .setStorage(PBHelper.convert(storage))
+        .setPrevStorage(PBHelper.convert(prevStorage))
+        .setTargetLayoutVersion(targetLayoutVersion);
+    if (nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+    CanRollBackResponseProto response = ipc(() -> rpcProxy.canRollBack(
+        NULL_CONTROLLER, req.build()));
+    return response.getCanRollBack();
   }
 
   @Override
   public void doRollback(String journalId,
                          String nameServiceId) throws IOException {
-    try {
-      DoRollbackRequestProto.Builder req = DoRollbackRequestProto.newBuilder()
-          .setJid(convertJournalId(journalId));
+    DoRollbackRequestProto.Builder req = DoRollbackRequestProto.newBuilder()
+        .setJid(convertJournalId(journalId));
 
-      if (nameServiceId != null) {
-        req.setNameserviceId(nameServiceId);
-      }
-      rpcProxy.doRollback(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    if (nameServiceId != null) {
+      req.setNameserviceId(nameServiceId);
     }
+    ipc(() -> rpcProxy.doRollback(NULL_CONTROLLER, req.build()));
   }
 
   @Override
@@ -422,37 +348,28 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
                               String nameServiceId,
                               long startTxId)
       throws IOException {
-    try {
-      DiscardSegmentsRequestProto.Builder req = DiscardSegmentsRequestProto
-          .newBuilder()
-          .setJid(convertJournalId(journalId)).setStartTxId(startTxId);
+    DiscardSegmentsRequestProto.Builder req = DiscardSegmentsRequestProto
+        .newBuilder()
+        .setJid(convertJournalId(journalId)).setStartTxId(startTxId);
 
-      if (nameServiceId != null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      rpcProxy.discardSegments(NULL_CONTROLLER, req.build());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    if (nameServiceId != null) {
+      req.setNameServiceId(nameServiceId);
     }
+    ipc(() -> rpcProxy.discardSegments(NULL_CONTROLLER, req.build()));
   }
 
   @Override
   public Long getJournalCTime(String journalId,
                               String nameServiceId) throws IOException {
-    try {
-
-      GetJournalCTimeRequestProto.Builder req = GetJournalCTimeRequestProto
-          .newBuilder()
-          .setJid(convertJournalId(journalId));
-      if(nameServiceId !=null) {
-        req.setNameServiceId(nameServiceId);
-      }
-      GetJournalCTimeResponseProto response = rpcProxy.getJournalCTime(
-          NULL_CONTROLLER, req.build());
-      return response.getResultCTime();
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+    GetJournalCTimeRequestProto.Builder req = GetJournalCTimeRequestProto
+        .newBuilder()
+        .setJid(convertJournalId(journalId));
+    if(nameServiceId !=null) {
+      req.setNameServiceId(nameServiceId);
     }
+    GetJournalCTimeResponseProto response = ipc(() -> rpcProxy.getJournalCTime(
+        NULL_CONTROLLER, req.build()));
+    return response.getResultCTime();
   }
 
 }
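A Java detail explains why the request builders stay outside the ipc() lambdas throughout this file: a lambda can only capture local variables that are effectively final. Each builder is therefore assigned exactly once and afterwards only mutated through setter calls; req.build() alone is evaluated inside the lambda. A sketch of the shape, with hypothetical proto names:

    // req is assigned once, so it is effectively final and may be captured
    ExampleRequestProto.Builder req = ExampleRequestProto.newBuilder()
        .setJid(convertJournalId(jid));
    if (nameServiceId != null) {
      req.setNameServiceId(nameServiceId); // mutation is fine; reassignment is not
    }
    ExampleResponseProto resp =
        ipc(() -> rpcProxy.example(NULL_CONTROLLER, req.build()));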

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java

@@ -109,7 +109,7 @@ public class JournalNodeRpcServer implements QJournalProtocol,
     BlockingService interQJournalProtocolService = InterQJournalProtocolService
         .newReflectiveBlockingService(qJournalProtocolServerSideTranslatorPB);
 
-    DFSUtil.addPBProtocol(confCopy, InterQJournalProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(confCopy, InterQJournalProtocolPB.class,
         interQJournalProtocolService, server);
 
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

@@ -1350,14 +1350,14 @@ public class DataNode extends ReconfigurableBase
         = new ReconfigurationProtocolServerSideTranslatorPB(this);
     service = ReconfigurationProtocolService
         .newReflectiveBlockingService(reconfigurationProtocolXlator);
-    DFSUtil.addPBProtocol(getConf(), ReconfigurationProtocolPB.class, service,
+    DFSUtil.addInternalPBProtocol(getConf(), ReconfigurationProtocolPB.class, service,
         ipcServer);
 
     InterDatanodeProtocolServerSideTranslatorPB interDatanodeProtocolXlator = 
         new InterDatanodeProtocolServerSideTranslatorPB(this);
     service = InterDatanodeProtocolService
         .newReflectiveBlockingService(interDatanodeProtocolXlator);
-    DFSUtil.addPBProtocol(getConf(), InterDatanodeProtocolPB.class, service,
+    DFSUtil.addInternalPBProtocol(getConf(), InterDatanodeProtocolPB.class, service,
         ipcServer);
 
     LOG.info("Opened IPC server at {}", ipcServer.getListenerAddress());

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java

@@ -246,7 +246,7 @@ public class BackupNode extends NameNode {
           new JournalProtocolServerSideTranslatorPB(this);
       BlockingService service = JournalProtocolService
           .newReflectiveBlockingService(journalProtocolTranslator);
-      DFSUtil.addPBProtocol(conf, JournalProtocolPB.class, service,
+      DFSUtil.addInternalPBProtocol(conf, JournalProtocolPB.class, service,
           this.clientRpcServer);
     }
     

+ 19 - 19
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java

@@ -371,24 +371,24 @@ public class NameNodeRpcServer implements NamenodeProtocols {
           .build();
 
       // Add all the RPC protocols that the namenode implements
-      DFSUtil.addPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
+      DFSUtil.addInternalPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
           serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, ReconfigurationProtocolPB.class,
+      DFSUtil.addInternalPBProtocol(conf, ReconfigurationProtocolPB.class,
           reconfigurationPbService, serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
+      DFSUtil.addInternalPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
           serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
+      DFSUtil.addInternalPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
           serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
+      DFSUtil.addInternalPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
           refreshAuthService, serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class, 
+      DFSUtil.addInternalPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
           refreshUserMappingService, serviceRpcServer);
       // We support Refreshing call queue here in case the client RPC queue is full
-      DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
+      DFSUtil.addInternalPBProtocol(conf, RefreshCallQueueProtocolPB.class,
           refreshCallQueueService, serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
+      DFSUtil.addInternalPBProtocol(conf, GenericRefreshProtocolPB.class,
           genericRefreshService, serviceRpcServer);
-      DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class, 
+      DFSUtil.addInternalPBProtocol(conf, GetUserMappingsProtocolPB.class,
           getUserMappingService, serviceRpcServer);
 
       // Update the address with the correct port
@@ -431,7 +431,7 @@ public class NameNodeRpcServer implements NamenodeProtocols {
           .setSecretManager(namesystem.getDelegationTokenSecretManager())
           .build();
 
-      DFSUtil.addPBProtocol(conf, DatanodeLifelineProtocolPB.class,
+      DFSUtil.addInternalPBProtocol(conf, DatanodeLifelineProtocolPB.class,
           lifelineProtoPbService, lifelineRpcServer);
 
       // Update the address with the correct port
@@ -474,23 +474,23 @@ public class NameNodeRpcServer implements NamenodeProtocols {
         .build();
 
     // Add all the RPC protocols that the namenode implements
-    DFSUtil.addPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
+    DFSUtil.addInternalPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
         clientRpcServer);
-    DFSUtil.addPBProtocol(conf, ReconfigurationProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, ReconfigurationProtocolPB.class,
         reconfigurationPbService, clientRpcServer);
-    DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
+    DFSUtil.addInternalPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
         clientRpcServer);
-    DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
+    DFSUtil.addInternalPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
         clientRpcServer);
-    DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class, 
+    DFSUtil.addInternalPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
         refreshAuthService, clientRpcServer);
-    DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class, 
+    DFSUtil.addInternalPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
         refreshUserMappingService, clientRpcServer);
-    DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, RefreshCallQueueProtocolPB.class,
         refreshCallQueueService, clientRpcServer);
-    DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
+    DFSUtil.addInternalPBProtocol(conf, GenericRefreshProtocolPB.class,
         genericRefreshService, clientRpcServer);
-    DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class, 
+    DFSUtil.addInternalPBProtocol(conf, GetUserMappingsProtocolPB.class,
         getUserMappingService, clientRpcServer);
 
     // set service-level authorization security policy
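The rename from DFSUtil.addPBProtocol to DFSUtil.addInternalPBProtocol in the RPC server classes above marks these registrations as internal, shaded-protobuf services, keeping them off the protobuf 2.5 code path. A hedged sketch of what such a method would do, assuming the existing RPC.setProtocolEngine and Server.addProtocol APIs; the body is an inference, not the patch text:

    // sketch: bind a shaded-protobuf BlockingService to an RPC server
    public static void addInternalPBProtocol(Configuration conf,
        Class<?> protocol, BlockingService service, RPC.Server server)
        throws IOException {
      RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class);
      server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
    }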

+ 4 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml

@@ -36,6 +36,10 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs-client</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>

+ 10 - 26
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/protocolPB/HSAdminRefreshProtocolClientSideTranslatorPB.java

@@ -22,7 +22,6 @@ import java.io.Closeable;
 import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
@@ -34,7 +33,8 @@ import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.Refr
 import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLogRetentionSettingsRequestProto;
 
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 @Private
 public class HSAdminRefreshProtocolClientSideTranslatorPB implements
@@ -73,43 +73,27 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements
 
   @Override
   public void refreshAdminAcls() throws IOException {
-    try {
-      rpcProxy.refreshAdminAcls(NULL_CONTROLLER,
-          VOID_REFRESH_ADMIN_ACLS_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshAdminAcls(NULL_CONTROLLER,
+        VOID_REFRESH_ADMIN_ACLS_REQUEST));
   }
 
 
   @Override
   public void refreshLoadedJobCache() throws IOException {
-    try {
-      rpcProxy.refreshLoadedJobCache(NULL_CONTROLLER,
-          VOID_REFRESH_LOADED_JOB_CACHE_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshLoadedJobCache(NULL_CONTROLLER,
+        VOID_REFRESH_LOADED_JOB_CACHE_REQUEST));
   }
   
   @Override
   public void refreshJobRetentionSettings() throws IOException {
-    try {
-      rpcProxy.refreshJobRetentionSettings(NULL_CONTROLLER,
-          VOID_REFRESH_JOB_RETENTION_SETTINGS_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshJobRetentionSettings(NULL_CONTROLLER,
+        VOID_REFRESH_JOB_RETENTION_SETTINGS_REQUEST));
   }
 
   @Override
   public void refreshLogRetentionSettings() throws IOException {
-    try {
-      rpcProxy.refreshLogRetentionSettings(NULL_CONTROLLER,
-          VOID_REFRESH_LOG_RETENTION_SETTINGS_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshLogRetentionSettings(NULL_CONTROLLER,
+        VOID_REFRESH_LOG_RETENTION_SETTINGS_REQUEST));
   }
 
   @Override
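Note that ipc() covers these void protocol methods as well: the generated blocking stubs always return a response proto, so the lambda simply returns the (discarded) response and no separate void overload is required.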

+ 1 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml

@@ -39,6 +39,7 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>

+ 7 - 1
hadoop-project/pom.xml

@@ -86,8 +86,14 @@
     <!-- com.google.re2j version -->
     <re2j.version>1.1</re2j.version>
 
-    <!--Protobuf version for backward compatibility-->
+    <!-- Protobuf version for backward compatibility -->
+    <!-- This is used in hadoop-common for compilation only -->
     <protobuf.version>2.5.0</protobuf.version>
+    <!-- Protobuf scope in hadoop common -->
+    <!-- set to "provided" and protobuf2 will no longer be exported as a dependency -->
+    <common.protobuf2.scope>compile</common.protobuf2.scope>
+    <!-- Protobuf scope in other modules which explicitly import the library -->
+    <transient.protobuf2.scope>${common.protobuf2.scope}</transient.protobuf2.scope>
     <!-- ProtocolBuffer version, actually used in Hadoop -->
     <hadoop.protobuf.version>3.7.1</hadoop.protobuf.version>
     <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
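With these two properties in place, a build can drop protobuf 2.5 from the runtime classpath by overriding the scope, for example:

    mvn clean install -DskipTests -Dcommon.protobuf2.scope=provided

Because transient.protobuf2.scope defaults to the same value, the modules below that declare protobuf-java explicitly track hadoop-common automatically unless that property is overridden on its own.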

+ 1 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml

@@ -185,6 +185,7 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
 
     <dependency>

+ 1 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml

@@ -132,6 +132,7 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>org.bouncycastle</groupId>

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java

@@ -24,7 +24,6 @@ import java.net.InetSocketAddress;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.yarn.exceptions.YarnException;
@@ -106,6 +105,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResou
 
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
+
 @Private
 public class ResourceManagerAdministrationProtocolPBClientImpl implements ResourceManagerAdministrationProtocol, Closeable {
 
@@ -223,7 +224,7 @@ public class ResourceManagerAdministrationProtocolPBClientImpl implements Resour
       return (String[]) responseProto.getGroupsList().toArray(
           new String[responseProto.getGroupsCount()]);
     } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
+      throw getRemoteException(e);
     }
   }
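In this client the explicit try/catch survives; only the exception mapping changes, to a statically imported getRemoteException from the shaded helper. A sketch of what that conversion presumably does (unwrap an IOException cause when present, otherwise wrap), again an inference rather than the patch text:

    public static IOException getRemoteException(ServiceException se) {
      Throwable cause = se.getCause();
      if (cause instanceof IOException) {
        return (IOException) cause; // surface the original remote IOException
      }
      return new IOException(se);   // otherwise wrap the ServiceException
    }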
 

+ 1 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml

@@ -38,6 +38,7 @@
             <groupId>com.google.protobuf</groupId>
             <artifactId>protobuf-java</artifactId>
             <version>${hadoop.protobuf.version}</version>
+            <scope>${transient.protobuf2.scope}</scope>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>

+ 1 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml

@@ -89,6 +89,7 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>junit</groupId>

+ 1 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml

@@ -81,6 +81,7 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
+      <scope>${transient.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>commons-io</groupId>

Too many files were changed in this diff, so some files are not shown.