
HDFS-15790. Make ProtobufRpcEngineProtos and ProtobufRpcEngineProtos2 Co-Exist (#2767)

Vinayakumar B, 4 years ago
Parent commit: 2bbeae3240

+ 48 - 1
hadoop-common-project/hadoop-common/pom.xml

@@ -413,7 +413,12 @@
           </execution>
           <execution>
             <id>src-test-compile-protoc</id>
-            <configuration><skip>false</skip></configuration>
+            <configuration>
+              <skip>false</skip>
+              <excludes>
+                <exclude>*legacy.proto</exclude>
+              </excludes>
+            </configuration>
           </execution>
         </executions>
       </plugin>
@@ -434,6 +439,10 @@
             <id>replace-generated-test-sources</id>
             <configuration>
               <skip>false</skip>
+              <excludes>
+                <exclude>**/TestProtosLegacy.java</exclude>
+                <exclude>**/TestRpcServiceProtosLegacy.java</exclude>
+              </excludes>
             </configuration>
           </execution>
           <execution>
@@ -446,6 +455,7 @@
                 <exclude>**/RpcWritable.java</exclude>
                 <exclude>**/ProtobufRpcEngineCallback.java</exclude>
                 <exclude>**/ProtobufRpcEngine.java</exclude>
+                <exclude>**/ProtobufRpcEngine2.java</exclude>
                 <exclude>**/ProtobufRpcEngineProtos.java</exclude>
               </excludes>
             </configuration>
@@ -454,6 +464,9 @@
             <id>replace-test-sources</id>
             <configuration>
               <skip>false</skip>
+              <excludes>
+                <exclude>**/TestProtoBufRpc.java</exclude>
+              </excludes>
             </configuration>
           </execution>
         </executions>
@@ -1077,6 +1090,18 @@
                   </sources>
                 </configuration>
               </execution>
+              <execution>
+                <id>add-test-source-legacy-protobuf</id>
+                <phase>generate-test-sources</phase>
+                <goals>
+                  <goal>add-test-source</goal>
+                </goals>
+                <configuration>
+                  <sources>
+                    <source>${basedir}/src/test/arm-java</source>
+                  </sources>
+                </configuration>
+              </execution>
             </executions>
           </plugin>
         </plugins>
@@ -1118,6 +1143,28 @@
                   </includes>
                 </configuration>
               </execution>
+              <execution>
+                <id>src-test-compile-protoc-legacy</id>
+                <phase>generate-test-sources</phase>
+                <goals>
+                  <goal>compile</goal>
+                </goals>
+                <configuration>
+                  <skip>false</skip>
+                  <!--Generating with old protobuf version for backward compatibility-->
+                  <protocArtifact>
+                    com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
+                  </protocArtifact>
+                  <includeDependenciesInDescriptorSet>false</includeDependenciesInDescriptorSet>
+                  <protoSourceRoot>${basedir}/src/test/proto</protoSourceRoot>
+                  <outputDirectory>${project.build.directory}/generated-test-sources/java</outputDirectory>
+                  <clearOutputDirectory>false</clearOutputDirectory>
+                  <includes>
+                    <include>test_legacy.proto</include>
+                    <include>test_rpc_service_legacy.proto</include>
+                  </includes>
+                </configuration>
+              </execution>
              </executions>
           </plugin>
         </plugins>
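
The pom.xml changes above carve the legacy protos out of the shaded-protobuf pipeline: *legacy.proto is excluded from the regular src-test-compile-protoc run and compiled by the new src-test-compile-protoc-legacy execution against com.google.protobuf:protoc, the source-replacer steps skip the legacy generated classes, and ARM builds (where no protoc 2.5.0 binary exists) pick up pre-generated sources from src/test/arm-java. The result is two bindings of the same test messages on one classpath. A minimal sketch of that co-existence, assuming the shaded TestProtos binding generated elsewhere in this module:

public final class ProtoCoexistenceSketch {
  public static void main(String[] args) {
    // Legacy binding: generated by protoc 2.5.0, extends the unshaded runtime.
    com.google.protobuf.Message legacy =
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto
            .getDefaultInstance();
    // Shaded binding: generated against the relocated hadoop-thirdparty runtime.
    org.apache.hadoop.thirdparty.protobuf.Message shaded =
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto
            .getDefaultInstance();
    // Distinct runtimes, so the two class hierarchies never collide.
    System.out.println(legacy.getClass() + " vs " + shaded.getClass());
  }
}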

+ 58 - 157
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.ipc;
 
-import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;
 import com.google.protobuf.Descriptors.MethodDescriptor;
 import com.google.protobuf.Message;
@@ -31,16 +30,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client.ConnectionId;
-import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.RPC.RpcKind;
 import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.Time;
-import org.apache.hadoop.util.concurrent.AsyncGet;
+import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.concurrent.AsyncGet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -68,9 +66,8 @@ public class ProtobufRpcEngine implements RpcEngine {
       ASYNC_RETURN_MESSAGE = new ThreadLocal<>();
 
   static { // Register the rpcRequest deserializer for ProtobufRpcEngine
-    org.apache.hadoop.ipc.Server.registerProtocolEngine(
-        RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcProtobufRequest.class,
-        new Server.ProtoBufRpcInvoker());
+    //These will be used on the server side, which is always ProtobufRpcEngine2
+    ProtobufRpcEngine2.registerProtocolEngine();
   }
 
   private static final ClientCache CLIENTS = new ClientCache();
@@ -352,8 +349,6 @@ public class ProtobufRpcEngine implements RpcEngine {
     return CLIENTS.getClient(conf, SocketFactory.getDefault(),
         RpcWritable.Buffer.class);
   }
-  
- 
 
   @Override
   public RPC.Server getServer(Class<?> protocol, Object protocolImpl,
@@ -366,25 +361,17 @@ public class ProtobufRpcEngine implements RpcEngine {
         numHandlers, numReaders, queueSizePerHandler, verbose, secretManager,
         portRangeConfig, alignmentContext);
   }
-  
-  public static class Server extends RPC.Server {
+
+  /**
+   * The server implementation is always the ProtobufRpcEngine2 based
+   * implementation; it supports backward compatibility with protobuf 2.5
+   * based implementations, which use non-shaded protobuf classes.
+   */
+  public static class Server extends ProtobufRpcEngine2.Server {
 
     static final ThreadLocal<ProtobufRpcEngineCallback> currentCallback =
         new ThreadLocal<>();
 
-    static final ThreadLocal<CallInfo> currentCallInfo = new ThreadLocal<>();
-    private static final RpcInvoker RPC_INVOKER = new ProtoBufRpcInvoker();
-
-    static class CallInfo {
-      private final RPC.Server server;
-      private final String methodName;
-
-      public CallInfo(RPC.Server server, String methodName) {
-        this.server = server;
-        this.methodName = methodName;
-      }
-    }
-
     static class ProtobufRpcEngineCallbackImpl
         implements ProtobufRpcEngineCallback {
 
@@ -394,9 +381,9 @@ public class ProtobufRpcEngine implements RpcEngine {
       private final long setupTime;
 
       public ProtobufRpcEngineCallbackImpl() {
-        this.server = currentCallInfo.get().server;
+        this.server = CURRENT_CALL_INFO.get().getServer();
         this.call = Server.getCurCall().get();
-        this.methodName = currentCallInfo.get().methodName;
+        this.methodName = CURRENT_CALL_INFO.get().getMethodName();
         this.setupTime = Time.now();
       }
 
@@ -443,144 +430,58 @@ public class ProtobufRpcEngine implements RpcEngine {
         SecretManager<? extends TokenIdentifier> secretManager, 
         String portRangeConfig, AlignmentContext alignmentContext)
         throws IOException {
-      super(bindAddress, port, null, numHandlers,
-          numReaders, queueSizePerHandler, conf,
-          serverNameFromClass(protocolImpl.getClass()), secretManager,
-          portRangeConfig);
-      setAlignmentContext(alignmentContext);
-      this.verbose = verbose;  
-      registerProtocolAndImpl(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocolClass,
-          protocolImpl);
-    }
-
-    @Override
-    protected RpcInvoker getServerRpcInvoker(RpcKind rpcKind) {
-      if (rpcKind == RpcKind.RPC_PROTOCOL_BUFFER) {
-        return RPC_INVOKER;
-      }
-      return super.getServerRpcInvoker(rpcKind);
+      super(protocolClass, protocolImpl, conf, bindAddress, port, numHandlers,
+          numReaders, queueSizePerHandler, verbose, secretManager,
+          portRangeConfig, alignmentContext);
     }
 
     /**
-     * Protobuf invoker for {@link RpcInvoker}
+     * This implementation is the same as
+     * ProtobufRpcEngine2.Server.ProtobufInvoker#call(..)
+     * except this implementation uses non-shaded protobuf classes from legacy
+     * protobuf version (default 2.5.0).
      */
-    static class ProtoBufRpcInvoker implements RpcInvoker {
-      private static ProtoClassProtoImpl getProtocolImpl(RPC.Server server,
-          String protoName, long clientVersion) throws RpcServerException {
-        ProtoNameVer pv = new ProtoNameVer(protoName, clientVersion);
-        ProtoClassProtoImpl impl = 
-            server.getProtocolImplMap(RPC.RpcKind.RPC_PROTOCOL_BUFFER).get(pv);
-        if (impl == null) { // no match for Protocol AND Version
-          VerProtocolImpl highest = 
-              server.getHighestSupportedProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, 
-                  protoName);
-          if (highest == null) {
-            throw new RpcNoSuchProtocolException(
-                "Unknown protocol: " + protoName);
-          }
-          // protocol supported but not the version that client wants
-          throw new RPC.VersionMismatch(protoName, clientVersion,
-              highest.version);
-        }
-        return impl;
+    static RpcWritable processCall(RPC.Server server,
+        String connectionProtocolName, RpcWritable.Buffer request,
+        String methodName, ProtoClassProtoImpl protocolImpl) throws Exception {
+      BlockingService service = (BlockingService) protocolImpl.protocolImpl;
+      MethodDescriptor methodDescriptor = service.getDescriptorForType()
+          .findMethodByName(methodName);
+      if (methodDescriptor == null) {
+        String msg = "Unknown method " + methodName + " called on "
+                              + connectionProtocolName + " protocol.";
+        LOG.warn(msg);
+        throw new RpcNoSuchMethodException(msg);
       }
+      Message prototype = service.getRequestPrototype(methodDescriptor);
+      Message param = request.getValue(prototype);
 
-      @Override 
-      /**
-       * This is a server side method, which is invoked over RPC. On success
-       * the return response has protobuf response payload. On failure, the
-       * exception name and the stack trace are returned in the response.
-       * See {@link HadoopRpcResponseProto}
-       * 
-       * In this method there three types of exceptions possible and they are
-       * returned in response as follows.
-       * <ol>
-       * <li> Exceptions encountered in this method that are returned 
-       * as {@link RpcServerException} </li>
-       * <li> Exceptions thrown by the service is wrapped in ServiceException. 
-       * In that this method returns in response the exception thrown by the 
-       * service.</li>
-       * <li> Other exceptions thrown by the service. They are returned as
-       * it is.</li>
-       * </ol>
-       */
-      public Writable call(RPC.Server server, String connectionProtocolName,
-          Writable writableRequest, long receiveTime) throws Exception {
-        RpcProtobufRequest request = (RpcProtobufRequest) writableRequest;
-        RequestHeaderProto rpcRequest = request.getRequestHeader();
-        String methodName = rpcRequest.getMethodName();
-
-        /** 
-         * RPCs for a particular interface (ie protocol) are done using a
-         * IPC connection that is setup using rpcProxy.
-         * The rpcProxy's has a declared protocol name that is 
-         * sent form client to server at connection time. 
-         * 
-         * Each Rpc call also sends a protocol name 
-         * (called declaringClassprotocolName). This name is usually the same
-         * as the connection protocol name except in some cases. 
-         * For example metaProtocols such ProtocolInfoProto which get info
-         * about the protocol reuse the connection but need to indicate that
-         * the actual protocol is different (i.e. the protocol is
-         * ProtocolInfoProto) since they reuse the connection; in this case
-         * the declaringClassProtocolName field is set to the ProtocolInfoProto.
-         */
-
-        String declaringClassProtoName = 
-            rpcRequest.getDeclaringClassProtocolName();
-        long clientVersion = rpcRequest.getClientProtocolVersion();
-        return call(server, connectionProtocolName, request, receiveTime,
-            methodName, declaringClassProtoName, clientVersion);
-      }
-
-      protected Writable call(RPC.Server server, String connectionProtocolName,
-          RpcWritable.Buffer request, long receiveTime, String methodName,
-          String declaringClassProtoName, long clientVersion) throws Exception {
-        if (server.verbose)
-          LOG.info("Call: connectionProtocolName=" + connectionProtocolName + 
-              ", method=" + methodName);
-        
-        ProtoClassProtoImpl protocolImpl = getProtocolImpl(server, 
-                              declaringClassProtoName, clientVersion);
-        BlockingService service = (BlockingService) protocolImpl.protocolImpl;
-        MethodDescriptor methodDescriptor = service.getDescriptorForType()
-            .findMethodByName(methodName);
-        if (methodDescriptor == null) {
-          String msg = "Unknown method " + methodName + " called on " 
-                                + connectionProtocolName + " protocol.";
-          LOG.warn(msg);
-          throw new RpcNoSuchMethodException(msg);
-        }
-        Message prototype = service.getRequestPrototype(methodDescriptor);
-        Message param = request.getValue(prototype);
-
-        Message result;
-        Call currentCall = Server.getCurCall().get();
-        try {
-          server.rpcDetailedMetrics.init(protocolImpl.protocolClass);
-          currentCallInfo.set(new CallInfo(server, methodName));
-          currentCall.setDetailedMetricsName(methodName);
-          result = service.callBlockingMethod(methodDescriptor, null, param);
-          // Check if this needs to be a deferred response,
-          // by checking the ThreadLocal callback being set
-          if (currentCallback.get() != null) {
-            currentCall.deferResponse();
-            currentCallback.set(null);
-            return null;
-          }
-        } catch (ServiceException e) {
-          Exception exception = (Exception) e.getCause();
-          currentCall.setDetailedMetricsName(
-              exception.getClass().getSimpleName());
-          throw (Exception) e.getCause();
-        } catch (Exception e) {
-          currentCall.setDetailedMetricsName(e.getClass().getSimpleName());
-          throw e;
-        } finally {
-          currentCallInfo.set(null);
+      Message result;
+      Call currentCall = Server.getCurCall().get();
+      try {
+        server.rpcDetailedMetrics.init(protocolImpl.protocolClass);
+        CURRENT_CALL_INFO.set(new CallInfo(server, methodName));
+        currentCall.setDetailedMetricsName(methodName);
+        result = service.callBlockingMethod(methodDescriptor, null, param);
+        // Check if this needs to be a deferred response,
+        // by checking the ThreadLocal callback being set
+        if (currentCallback.get() != null) {
+          currentCall.deferResponse();
+          currentCallback.set(null);
+          return null;
         }
-        return RpcWritable.wrap(result);
+      } catch (ServiceException e) {
+        Exception exception = (Exception) e.getCause();
+        currentCall
+            .setDetailedMetricsName(exception.getClass().getSimpleName());
+        throw (Exception) e.getCause();
+      } catch (Exception e) {
+        currentCall.setDetailedMetricsName(e.getClass().getSimpleName());
+        throw e;
+      } finally {
+        CURRENT_CALL_INFO.set(null);
       }
+      return RpcWritable.wrap(result);
     }
   }
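
After this change ProtobufRpcEngine retains only its client side plus the non-shaded processCall() helper; its Server subclasses ProtobufRpcEngine2.Server, and invoker registration is delegated to ProtobufRpcEngine2. From a caller's point of view, server construction is unchanged. A hedged usage sketch; protocolPB and legacyImpl are hypothetical placeholders for a protocol interface and an implementation wrapping a com.google.protobuf BlockingService:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;

public final class LegacyServerSketch {
  private LegacyServerSketch() {}

  /** Starts a server for a protocol backed by a legacy (non-shaded) impl. */
  public static RPC.Server startLegacyServer(Configuration conf,
      Class<?> protocolPB, Object legacyImpl) throws IOException {
    // Ask for the legacy engine; after this commit the server it builds is
    // still a ProtobufRpcEngine2.Server underneath.
    RPC.setProtocolEngine(conf, protocolPB, ProtobufRpcEngine.class);
    RPC.Server server = new RPC.Builder(conf)
        .setProtocol(protocolPB)
        .setInstance(legacyImpl)      // non-shaded BlockingService wrapper
        .setBindAddress("0.0.0.0")
        .setPort(0)                   // ephemeral port
        .build();
    server.start();
    return server;
  }
}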
 

+ 54 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java

@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.ipc;
 
-import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.thirdparty.protobuf.*;
-import org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -33,6 +30,12 @@ import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngine2Protos.RequestHeaderProt
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.thirdparty.protobuf.BlockingService;
+import org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.thirdparty.protobuf.Message;
+import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+import org.apache.hadoop.thirdparty.protobuf.TextFormat;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.concurrent.AsyncGet;
 import org.apache.hadoop.tracing.Tracer;
@@ -61,9 +64,16 @@ public class ProtobufRpcEngine2 implements RpcEngine {
       ASYNC_RETURN_MESSAGE = new ThreadLocal<>();
 
   static { // Register the rpcRequest deserializer for ProtobufRpcEngine
-    org.apache.hadoop.ipc.Server.registerProtocolEngine(
-        RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcProtobufRequest.class,
-        new Server.ProtoBufRpcInvoker());
+    registerProtocolEngine();
+  }
+
+  static void registerProtocolEngine() {
+    if (Server.getRpcInvoker(RPC.RpcKind.RPC_PROTOCOL_BUFFER) == null) {
+      org.apache.hadoop.ipc.Server
+          .registerProtocolEngine(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
+              ProtobufRpcEngine2.RpcProtobufRequest.class,
+              new Server.ProtoBufRpcInvoker());
+    }
   }
 
   private static final ClientCache CLIENTS = new ClientCache();
@@ -383,6 +393,14 @@ public class ProtobufRpcEngine2 implements RpcEngine {
         this.server = server;
         this.methodName = methodName;
       }
+
+      public RPC.Server getServer() {
+        return server;
+      }
+
+      public String getMethodName() {
+        return methodName;
+      }
     }
 
     static class ProtobufRpcEngineCallbackImpl
@@ -394,9 +412,9 @@ public class ProtobufRpcEngine2 implements RpcEngine {
       private final long setupTime;
 
       ProtobufRpcEngineCallbackImpl() {
-        this.server = CURRENT_CALL_INFO.get().server;
+        this.server = CURRENT_CALL_INFO.get().getServer();
         this.call = Server.getCurCall().get();
-        this.methodName = CURRENT_CALL_INFO.get().methodName;
+        this.methodName = CURRENT_CALL_INFO.get().getMethodName();
         this.setupTime = Time.now();
       }
 
@@ -417,7 +435,7 @@ public class ProtobufRpcEngine2 implements RpcEngine {
     }
 
     @InterfaceStability.Unstable
-    public static ProtobufRpcEngineCallback2 registerForDeferredResponse() {
+    public static ProtobufRpcEngineCallback2 registerForDeferredResponse2() {
       ProtobufRpcEngineCallback2 callback = new ProtobufRpcEngineCallbackImpl();
       CURRENT_CALLBACK.set(callback);
       return callback;
@@ -453,6 +471,17 @@ public class ProtobufRpcEngine2 implements RpcEngine {
           protocolImpl);
     }
 
+    //Use the latest protobuf rpc invoker itself as that is backward compatible.
+    private static final RpcInvoker RPC_INVOKER = new ProtoBufRpcInvoker();
+
+    @Override
+    protected RpcInvoker getServerRpcInvoker(RPC.RpcKind rpcKind) {
+      if (rpcKind == RPC.RpcKind.RPC_PROTOCOL_BUFFER) {
+        return RPC_INVOKER;
+      }
+      return super.getServerRpcInvoker(rpcKind);
+    }
+
     /**
      * Protobuf invoker for {@link RpcInvoker}.
      */
@@ -524,6 +553,7 @@ public class ProtobufRpcEngine2 implements RpcEngine {
             methodName, declaringClassProtoName, clientVersion);
       }
 
+      @SuppressWarnings("deprecation")
       protected Writable call(RPC.Server server, String connectionProtocolName,
           RpcWritable.Buffer request, long receiveTime, String methodName,
           String declaringClassProtoName, long clientVersion) throws Exception {
@@ -534,6 +564,21 @@ public class ProtobufRpcEngine2 implements RpcEngine {
 
         ProtoClassProtoImpl protocolImpl = getProtocolImpl(server,
                               declaringClassProtoName, clientVersion);
+        if (protocolImpl.isShadedPBImpl()) {
+          return call(server, connectionProtocolName, request, methodName,
+              protocolImpl);
+        }
+        //Legacy protobuf implementation. Handle using legacy (Non-shaded)
+        // protobuf classes.
+        return ProtobufRpcEngine.Server
+            .processCall(server, connectionProtocolName, request, methodName,
+                protocolImpl);
+      }
+
+      private RpcWritable call(RPC.Server server,
+          String connectionProtocolName, RpcWritable.Buffer request,
+          String methodName, ProtoClassProtoImpl protocolImpl)
+          throws Exception {
         BlockingService service = (BlockingService) protocolImpl.protocolImpl;
         MethodDescriptor methodDescriptor = service.getDescriptorForType()
             .findMethodByName(methodName);
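
ProtobufRpcEngine2's invoker now makes the dispatch decision per call: a shaded ProtoClassProtoImpl is handled in-line, anything else is routed to ProtobufRpcEngine.Server.processCall(), which speaks protobuf 2.5. Registration is guarded by getRpcInvoker(...) == null, so loading both engine classes registers the invoker exactly once. One server can therefore host shaded and legacy services side by side; a sketch under the assumption that the protocol classes and implementations are supplied by the caller:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.RPC;

public final class MixedServerSketch {
  private MixedServerSketch() {}

  /** One ipc.Server hosting a shaded impl and a legacy (protobuf 2.5) impl. */
  public static RPC.Server startMixed(Configuration conf,
      Class<?> shadedProtocolPB, Object shadedImpl,
      Class<?> legacyProtocolPB, Object legacyImpl) throws IOException {
    RPC.setProtocolEngine(conf, shadedProtocolPB, ProtobufRpcEngine2.class);
    RPC.Server server = new RPC.Builder(conf)
        .setProtocol(shadedProtocolPB)
        .setInstance(shadedImpl)        // shaded BlockingService wrapper
        .setBindAddress("0.0.0.0")
        .setPort(0)
        .build();
    // Same server, second protocol: the invoker decides per call whether to
    // run the shaded path or hand off to ProtobufRpcEngine.Server.processCall.
    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
        legacyProtocolPB, legacyImpl);
    server.start();
    return server;
  }
}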

+ 8 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

@@ -937,11 +937,18 @@ public class RPC {
     */
    static class ProtoClassProtoImpl {
      final Class<?> protocolClass;
-     final Object protocolImpl; 
+      final Object protocolImpl;
+      private final boolean shadedPBImpl;
+
      ProtoClassProtoImpl(Class<?> protocolClass, Object protocolImpl) {
        this.protocolClass = protocolClass;
        this.protocolImpl = protocolImpl;
+       this.shadedPBImpl = protocolImpl instanceof BlockingService;
      }
+
+      public boolean isShadedPBImpl() {
+        return shadedPBImpl;
+      }
    }
 
    ArrayList<Map<ProtoNameVer, ProtoClassProtoImpl>> protocolImplMapArray = 
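
ProtoClassProtoImpl now records at registration time whether an implementation is built on the shaded runtime. The test is a single instanceof against the relocated BlockingService (RPC.java imports the org.apache.hadoop.thirdparty one), so a legacy protobuf 2.5 service, which extends com.google.protobuf.BlockingService instead, reports false. A standalone sketch of the same check:

import org.apache.hadoop.thirdparty.protobuf.BlockingService;

public final class ShadedCheckSketch {
  private ShadedCheckSketch() {}

  /**
   * Mirrors the detection added to RPC.ProtoClassProtoImpl: shaded
   * implementations extend the relocated thirdparty BlockingService;
   * legacy protobuf-2.5 implementations extend the unshaded one and
   * therefore fail this instanceof check.
   */
  public static boolean isShadedPBImpl(Object protocolImpl) {
    return protocolImpl instanceof BlockingService;
  }
}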

+ 9892 - 0
hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestProtosLegacy.java

@@ -0,0 +1,9892 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// This class is added to the source tree because a protoc 2.5.0 executable
+// is not available for ARM to generate the same code.
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: test_legacy.proto
+
+package org.apache.hadoop.ipc.protobuf;
+
+public final class TestProtosLegacy {
+  private TestProtosLegacy() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface EmptyRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EmptyRequestProto}
+   */
+  public static final class EmptyRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EmptyRequestProtoOrBuilder {
+    // Use EmptyRequestProto.newBuilder() to construct.
+    private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EmptyRequestProto defaultInstance;
+    public static EmptyRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EmptyRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EmptyRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EmptyRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<EmptyRequestProto>() {
+      public EmptyRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EmptyRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EmptyRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EmptyRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
+    }
+
+    static {
+      defaultInstance = new EmptyRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
+  }
+
+  public interface EmptyResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EmptyResponseProto}
+   */
+  public static final class EmptyResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EmptyResponseProtoOrBuilder {
+    // Use EmptyResponseProto.newBuilder() to construct.
+    private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EmptyResponseProto defaultInstance;
+    public static EmptyResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EmptyResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EmptyResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EmptyResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<EmptyResponseProto>() {
+      public EmptyResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EmptyResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EmptyResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EmptyResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
+    }
+
+    static {
+      defaultInstance = new EmptyResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
+  }
+
+  public interface EchoRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string message = 1;
+    /**
+     * <code>required string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>required string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>required string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EchoRequestProto}
+   */
+  public static final class EchoRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EchoRequestProtoOrBuilder {
+    // Use EchoRequestProto.newBuilder() to construct.
+    private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EchoRequestProto defaultInstance;
+    public static EchoRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EchoRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EchoRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EchoRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<EchoRequestProto>() {
+      public EchoRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EchoRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EchoRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasMessage()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EchoRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasMessage()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto)
+    }
+
+    static {
+      defaultInstance = new EchoRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto)
+  }
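+
+  // A minimal usage sketch, not part of the generated file, assuming the
+  // protobuf 2.5 runtime on the classpath. Because 'message' is a required
+  // field, build() throws until it is set:
+  //
+  //   EchoRequestProto req = EchoRequestProto.newBuilder()
+  //       .setMessage("hello")
+  //       .build();
+  //   byte[] wire = req.toByteArray();
+  //   EchoRequestProto parsed = EchoRequestProto.parseFrom(wire);
+  //   assert "hello".equals(parsed.getMessage());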
+
+  public interface EchoResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string message = 1;
+    /**
+     * <code>required string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>required string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>required string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
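+
+  // Sketch (illustrative only): the generated *OrBuilder interface lets code
+  // read fields uniformly from either a built message or an in-progress
+  // Builder:
+  //
+  //   static java.lang.String messageOf(EchoResponseProtoOrBuilder m) {
+  //     return m.hasMessage() ? m.getMessage() : "";
+  //   }
+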
+  /**
+   * Protobuf type {@code hadoop.common.EchoResponseProto}
+   */
+  public static final class EchoResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EchoResponseProtoOrBuilder {
+    // Use EchoResponseProto.newBuilder() to construct.
+    private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EchoResponseProto defaultInstance;
+    public static EchoResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EchoResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EchoResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EchoResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<EchoResponseProto>() {
+      public EchoResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EchoResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EchoResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasMessage()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EchoResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasMessage()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
+    }
+
+    static {
+      defaultInstance = new EchoResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
+  }
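+
+  // Illustrative round trip, not part of the generated file: the
+  // parseDelimitedFrom overloads above pair with writeDelimitedTo from the
+  // protobuf 2.5 runtime for length-prefixed streaming (inside a method that
+  // declares java.io.IOException):
+  //
+  //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
+  //   EchoResponseProto.newBuilder().setMessage("pong").build()
+  //       .writeDelimitedTo(out);
+  //   EchoResponseProto back = EchoResponseProto.parseDelimitedFrom(
+  //       new java.io.ByteArrayInputStream(out.toByteArray()));
+  //   assert "pong".equals(back.getMessage());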
+
+  public interface OptRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional string message = 1;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.OptRequestProto}
+   */
+  public static final class OptRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements OptRequestProtoOrBuilder {
+    // Use OptRequestProto.newBuilder() to construct.
+    private OptRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private OptRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final OptRequestProto defaultInstance;
+    public static OptRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public OptRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OptRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OptRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<OptRequestProto>() {
+      public OptRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OptRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OptRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.OptRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.OptRequestProto)
+    }
+
+    static {
+      defaultInstance = new OptRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.OptRequestProto)
+  }
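+
+  // Sketch of the optional-field contract, not part of the generated file:
+  // unlike the required 'message' in EchoRequestProto, build() succeeds with
+  // the field unset, hasMessage() reports presence, and getMessage() falls
+  // back to the default "":
+  //
+  //   OptRequestProto empty = OptRequestProto.newBuilder().build();
+  //   assert !empty.hasMessage() && empty.getMessage().isEmpty();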
+
+  public interface OptResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional string message = 1;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.OptResponseProto}
+   */
+  public static final class OptResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements OptResponseProtoOrBuilder {
+    // Use OptResponseProto.newBuilder() to construct.
+    private OptResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private OptResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final OptResponseProto defaultInstance;
+    public static OptResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public OptResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OptResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OptResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<OptResponseProto>() {
+      public OptResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OptResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OptResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
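+    // message_ holds either a java.lang.String or a ByteString: getMessage()
+    // decodes lazily and caches the decoded String only when the bytes are
+    // valid UTF-8, while getMessageBytes() converts the other way and always
+    // caches the resulting ByteString.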
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.OptResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
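+      // Parse-then-merge: if parsing fails mid-stream, the fields read so
+      // far are recovered via getUnfinishedMessage() and merged in the
+      // finally block before the exception is rethrown.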
+      private int bitField0_;
+
+      // optional string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof java.lang.String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.OptResponseProto)
+    }
+
+    static {
+      defaultInstance = new OptResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.OptResponseProto)
+  }
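+  // Usage sketch (illustrative, not protoc output): the optional field
+  // carries an explicit presence bit, e.g.
+  //   OptResponseProto r =
+  //       OptResponseProto.newBuilder().setMessage("ok").build();
+  //   assert r.hasMessage() && "ok".equals(r.getMessage());
+  //   assert !OptResponseProto.getDefaultInstance().hasMessage();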
+
+  public interface SleepRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 milliSeconds = 1;
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    boolean hasMilliSeconds();
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    int getMilliSeconds();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SleepRequestProto}
+   */
+  public static final class SleepRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepRequestProtoOrBuilder {
+    // Use SleepRequestProto.newBuilder() to construct.
+    private SleepRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepRequestProto defaultInstance;
+    public static SleepRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              milliSeconds_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<SleepRequestProto>() {
+      public SleepRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 milliSeconds = 1;
+    public static final int MILLISECONDS_FIELD_NUMBER = 1;
+    private int milliSeconds_;
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    public boolean hasMilliSeconds() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    public int getMilliSeconds() {
+      return milliSeconds_;
+    }
+
+    private void initFields() {
+      milliSeconds_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasMilliSeconds()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, milliSeconds_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, milliSeconds_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasMilliSeconds() == other.hasMilliSeconds());
+      if (hasMilliSeconds()) {
+        result = result && (getMilliSeconds()
+            == other.getMilliSeconds());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMilliSeconds()) {
+        hash = (37 * hash) + MILLISECONDS_FIELD_NUMBER;
+        hash = (53 * hash) + getMilliSeconds();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SleepRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        milliSeconds_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.milliSeconds_ = milliSeconds_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance()) return this;
+        if (other.hasMilliSeconds()) {
+          setMilliSeconds(other.getMilliSeconds());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasMilliSeconds()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required int32 milliSeconds = 1;
+      private int milliSeconds_ ;
+      /**
+       * <code>required int32 milliSeconds = 1;</code>
+       */
+      public boolean hasMilliSeconds() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int32 milliSeconds = 1;</code>
+       */
+      public int getMilliSeconds() {
+        return milliSeconds_;
+      }
+      /**
+       * <code>required int32 milliSeconds = 1;</code>
+       */
+      public Builder setMilliSeconds(int value) {
+        bitField0_ |= 0x00000001;
+        milliSeconds_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 milliSeconds = 1;</code>
+       */
+      public Builder clearMilliSeconds() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        milliSeconds_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto)
+    }
+
+    static {
+      defaultInstance = new SleepRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto)
+  }
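+  // Usage sketch (illustrative, not protoc output): milliSeconds is a
+  // required field, so build() throws UninitializedMessageException when it
+  // is unset, while buildPartial() does not, e.g.
+  //   SleepRequestProto req = SleepRequestProto.newBuilder()
+  //       .setMilliSeconds(500).build();       // ok
+  //   SleepRequestProto.newBuilder().build();  // throws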
+
+  public interface SleepResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SleepResponseProto}
+   */
+  public static final class SleepResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepResponseProtoOrBuilder {
+    // Use SleepResponseProto.newBuilder() to construct.
+    private SleepResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepResponseProto defaultInstance;
+    public static SleepResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<SleepResponseProto>() {
+      public SleepResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SleepResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto)
+    }
+
+    static {
+      defaultInstance = new SleepResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto)
+  }
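+  // Usage sketch (illustrative, not protoc output): the response declares no
+  // fields, but unknown fields still round-trip; given some serialized bytes:
+  //   SleepResponseProto resp = SleepResponseProto.parseFrom(bytes);
+  //   byte[] out = resp.toByteArray();  // re-emits any unknown fields read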
+
+  public interface SlowPingRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required bool shouldSlow = 1;
+    /**
+     * <code>required bool shouldSlow = 1;</code>
+     */
+    boolean hasShouldSlow();
+    /**
+     * <code>required bool shouldSlow = 1;</code>
+     */
+    boolean getShouldSlow();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SlowPingRequestProto}
+   */
+  public static final class SlowPingRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements SlowPingRequestProtoOrBuilder {
+    // Use SlowPingRequestProto.newBuilder() to construct.
+    private SlowPingRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SlowPingRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SlowPingRequestProto defaultInstance;
+    public static SlowPingRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SlowPingRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SlowPingRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              shouldSlow_ = input.readBool();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SlowPingRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<SlowPingRequestProto>() {
+      public SlowPingRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SlowPingRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SlowPingRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required bool shouldSlow = 1;
+    public static final int SHOULDSLOW_FIELD_NUMBER = 1;
+    private boolean shouldSlow_;
+    /**
+     * <code>required bool shouldSlow = 1;</code>
+     */
+    public boolean hasShouldSlow() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required bool shouldSlow = 1;</code>
+     */
+    public boolean getShouldSlow() {
+      return shouldSlow_;
+    }
+
+    private void initFields() {
+      shouldSlow_ = false;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasShouldSlow()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBool(1, shouldSlow_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(1, shouldSlow_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasShouldSlow() == other.hasShouldSlow());
+      if (hasShouldSlow()) {
+        result = result && (getShouldSlow()
+            == other.getShouldSlow());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasShouldSlow()) {
+        hash = (37 * hash) + SHOULDSLOW_FIELD_NUMBER;
+        hash = (53 * hash) + hashBoolean(getShouldSlow());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SlowPingRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        shouldSlow_ = false;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.shouldSlow_ = shouldSlow_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance()) return this;
+        if (other.hasShouldSlow()) {
+          setShouldSlow(other.getShouldSlow());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasShouldSlow()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required bool shouldSlow = 1;
+      private boolean shouldSlow_ ;
+      /**
+       * <code>required bool shouldSlow = 1;</code>
+       */
+      public boolean hasShouldSlow() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required bool shouldSlow = 1;</code>
+       */
+      public boolean getShouldSlow() {
+        return shouldSlow_;
+      }
+      /**
+       * <code>required bool shouldSlow = 1;</code>
+       */
+      public Builder setShouldSlow(boolean value) {
+        bitField0_ |= 0x00000001;
+        shouldSlow_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required bool shouldSlow = 1;</code>
+       */
+      public Builder clearShouldSlow() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        shouldSlow_ = false;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.SlowPingRequestProto)
+    }
+
+    static {
+      defaultInstance = new SlowPingRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.SlowPingRequestProto)
+  }
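+
+  /*
+   * A minimal usage sketch (illustrative only, not part of the protoc
+   * output): building, serializing, and re-parsing SlowPingRequestProto
+   * with the generated builder API. All names below come from the
+   * generated code above or the protobuf runtime (toByteArray). Because
+   * shouldSlow is a required field, build() throws
+   * UninitializedMessageException while it is unset.
+   *
+   *   SlowPingRequestProto req = SlowPingRequestProto.newBuilder()
+   *       .setShouldSlow(true)
+   *       .build();
+   *   byte[] bytes = req.toByteArray();
+   *   SlowPingRequestProto parsed = SlowPingRequestProto.parseFrom(bytes);
+   *   // parsed.getShouldSlow() == true
+   */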
+
+  public interface EchoRequestProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated string message = 1;
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    java.util.List<java.lang.String>
+    getMessageList();
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    int getMessageCount();
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    java.lang.String getMessage(int index);
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes(int index);
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EchoRequestProto2}
+   */
+  public static final class EchoRequestProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements EchoRequestProto2OrBuilder {
+    // Use EchoRequestProto2.newBuilder() to construct.
+    private EchoRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EchoRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EchoRequestProto2 defaultInstance;
+    public static EchoRequestProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EchoRequestProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EchoRequestProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                message_ = new com.google.protobuf.LazyStringArrayList();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              message_.add(input.readBytes());
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          message_ = new com.google.protobuf.UnmodifiableLazyStringList(message_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EchoRequestProto2> PARSER =
+        new com.google.protobuf.AbstractParser<EchoRequestProto2>() {
+      public EchoRequestProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EchoRequestProto2(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EchoRequestProto2> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private com.google.protobuf.LazyStringList message_;
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public java.util.List<java.lang.String>
+        getMessageList() {
+      return message_;
+    }
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public int getMessageCount() {
+      return message_.size();
+    }
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public java.lang.String getMessage(int index) {
+      return message_.get(index);
+    }
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes(int index) {
+      return message_.getByteString(index);
+    }
+
+    private void initFields() {
+      message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < message_.size(); i++) {
+        output.writeBytes(1, message_.getByteString(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < message_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(message_.getByteString(i));
+        }
+        size += dataSize;
+        size += 1 * getMessageList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2) obj;
+
+      boolean result = true;
+      result = result && getMessageList()
+          .equals(other.getMessageList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getMessageCount() > 0) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessageList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EchoRequestProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          message_ = new com.google.protobuf.UnmodifiableLazyStringList(
+              message_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.message_ = message_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance()) return this;
+        if (!other.message_.isEmpty()) {
+          if (message_.isEmpty()) {
+            message_ = other.message_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureMessageIsMutable();
+            message_.addAll(other.message_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated string message = 1;
+      private com.google.protobuf.LazyStringList message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+      private void ensureMessageIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          message_ = new com.google.protobuf.LazyStringArrayList(message_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public java.util.List<java.lang.String>
+          getMessageList() {
+        return java.util.Collections.unmodifiableList(message_);
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public int getMessageCount() {
+        return message_.size();
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public java.lang.String getMessage(int index) {
+        return message_.get(index);
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes(int index) {
+        return message_.getByteString(index);
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder setMessage(
+          int index, java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureMessageIsMutable();
+        message_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder addMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureMessageIsMutable();
+        message_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder addAllMessage(
+          java.lang.Iterable<java.lang.String> values) {
+        ensureMessageIsMutable();
+        super.addAll(values, message_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder addMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureMessageIsMutable();
+        message_.add(value);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto2)
+    }
+
+    static {
+      defaultInstance = new EchoRequestProto2(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto2)
+  }
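+
+  /*
+   * A minimal usage sketch for the repeated string field (illustrative
+   * only, not part of the protoc output; every name comes from the
+   * generated API above). The builder exposes add/addAll/set/clear
+   * accessors, and getMessageList() returns an unmodifiable view.
+   *
+   *   EchoRequestProto2 req = EchoRequestProto2.newBuilder()
+   *       .addMessage("hello")
+   *       .addMessage("world")
+   *       .build();
+   *   // req.getMessageCount() == 2; req.getMessage(0).equals("hello")
+   */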
+
+  public interface EchoResponseProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated string message = 1;
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    java.util.List<java.lang.String>
+    getMessageList();
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    int getMessageCount();
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    java.lang.String getMessage(int index);
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes(int index);
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EchoResponseProto2}
+   */
+  public static final class EchoResponseProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements EchoResponseProto2OrBuilder {
+    // Use EchoResponseProto2.newBuilder() to construct.
+    private EchoResponseProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EchoResponseProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EchoResponseProto2 defaultInstance;
+    public static EchoResponseProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EchoResponseProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EchoResponseProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                message_ = new com.google.protobuf.LazyStringArrayList();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              message_.add(input.readBytes());
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          message_ = new com.google.protobuf.UnmodifiableLazyStringList(message_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EchoResponseProto2> PARSER =
+        new com.google.protobuf.AbstractParser<EchoResponseProto2>() {
+      public EchoResponseProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EchoResponseProto2(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EchoResponseProto2> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private com.google.protobuf.LazyStringList message_;
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public java.util.List<java.lang.String>
+        getMessageList() {
+      return message_;
+    }
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public int getMessageCount() {
+      return message_.size();
+    }
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public java.lang.String getMessage(int index) {
+      return message_.get(index);
+    }
+    /**
+     * <code>repeated string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes(int index) {
+      return message_.getByteString(index);
+    }
+
+    private void initFields() {
+      message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < message_.size(); i++) {
+        output.writeBytes(1, message_.getByteString(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < message_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(message_.getByteString(i));
+        }
+        size += dataSize;
+        size += 1 * getMessageList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) obj;
+
+      boolean result = true;
+      result = result && getMessageList()
+          .equals(other.getMessageList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getMessageCount() > 0) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessageList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EchoResponseProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          message_ = new com.google.protobuf.UnmodifiableLazyStringList(
+              message_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.message_ = message_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance()) return this;
+        if (!other.message_.isEmpty()) {
+          if (message_.isEmpty()) {
+            message_ = other.message_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureMessageIsMutable();
+            message_.addAll(other.message_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated string message = 1;
+      private com.google.protobuf.LazyStringList message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+      private void ensureMessageIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          message_ = new com.google.protobuf.LazyStringArrayList(message_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public java.util.List<java.lang.String>
+          getMessageList() {
+        return java.util.Collections.unmodifiableList(message_);
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public int getMessageCount() {
+        return message_.size();
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public java.lang.String getMessage(int index) {
+        return message_.get(index);
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes(int index) {
+        return message_.getByteString(index);
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder setMessage(
+          int index, java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureMessageIsMutable();
+        message_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder addMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureMessageIsMutable();
+        message_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder addAllMessage(
+          java.lang.Iterable<java.lang.String> values) {
+        ensureMessageIsMutable();
+        super.addAll(values, message_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated string message = 1;</code>
+       */
+      public Builder addMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureMessageIsMutable();
+        message_.add(value);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto2)
+    }
+
+    static {
+      defaultInstance = new EchoResponseProto2(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto2)
+  }
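+
+  /*
+   * A sketch of length-delimited streaming (illustrative only, not protoc
+   * output). writeDelimitedTo/parseDelimitedFrom come from the protobuf
+   * runtime and frame each message with a varint length prefix, so several
+   * messages can share a single stream.
+   *
+   *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
+   *   EchoResponseProto2.newBuilder().addMessage("pong").build()
+   *       .writeDelimitedTo(out);
+   *   EchoResponseProto2 parsed = EchoResponseProto2.parseDelimitedFrom(
+   *       new java.io.ByteArrayInputStream(out.toByteArray()));
+   */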
+
+  public interface AddRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 param1 = 1;
+    /**
+     * <code>required int32 param1 = 1;</code>
+     */
+    boolean hasParam1();
+    /**
+     * <code>required int32 param1 = 1;</code>
+     */
+    int getParam1();
+
+    // required int32 param2 = 2;
+    /**
+     * <code>required int32 param2 = 2;</code>
+     */
+    boolean hasParam2();
+    /**
+     * <code>required int32 param2 = 2;</code>
+     */
+    int getParam2();
+  }
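+
+  /*
+   * A sketch of required-field semantics (illustrative only, not protoc
+   * output; setParam1/setParam2 are assumed to follow the generated
+   * accessor pattern shown for the other builders above).
+   * buildPartial() tolerates missing required fields, while build()
+   * enforces them.
+   *
+   *   AddRequestProto.Builder b = AddRequestProto.newBuilder().setParam1(1);
+   *   b.isInitialized();   // false: param2 is required but unset
+   *   b.buildPartial();    // succeeds anyway
+   *   b.build();           // throws UninitializedMessageException
+   */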
+  /**
+   * Protobuf type {@code hadoop.common.AddRequestProto}
+   */
+  public static final class AddRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements AddRequestProtoOrBuilder {
+    // Use AddRequestProto.newBuilder() to construct.
+    private AddRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private AddRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final AddRequestProto defaultInstance;
+    public static AddRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public AddRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AddRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              param1_ = input.readInt32();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              param2_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AddRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<AddRequestProto>() {
+      public AddRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AddRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AddRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 param1 = 1;
+    public static final int PARAM1_FIELD_NUMBER = 1;
+    private int param1_;
+    /**
+     * <code>required int32 param1 = 1;</code>
+     */
+    public boolean hasParam1() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 param1 = 1;</code>
+     */
+    public int getParam1() {
+      return param1_;
+    }
+
+    // required int32 param2 = 2;
+    public static final int PARAM2_FIELD_NUMBER = 2;
+    private int param2_;
+    /**
+     * <code>required int32 param2 = 2;</code>
+     */
+    public boolean hasParam2() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>required int32 param2 = 2;</code>
+     */
+    public int getParam2() {
+      return param2_;
+    }
+
+    private void initFields() {
+      param1_ = 0;
+      param2_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasParam1()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasParam2()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, param1_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeInt32(2, param2_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, param1_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(2, param2_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasParam1() == other.hasParam1());
+      if (hasParam1()) {
+        result = result && (getParam1()
+            == other.getParam1());
+      }
+      result = result && (hasParam2() == other.hasParam2());
+      if (hasParam2()) {
+        result = result && (getParam2()
+            == other.getParam2());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasParam1()) {
+        hash = (37 * hash) + PARAM1_FIELD_NUMBER;
+        hash = (53 * hash) + getParam1();
+      }
+      if (hasParam2()) {
+        hash = (37 * hash) + PARAM2_FIELD_NUMBER;
+        hash = (53 * hash) + getParam2();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.AddRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        param1_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        param2_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.param1_ = param1_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.param2_ = param2_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance()) return this;
+        if (other.hasParam1()) {
+          setParam1(other.getParam1());
+        }
+        if (other.hasParam2()) {
+          setParam2(other.getParam2());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasParam1()) {
+
+          return false;
+        }
+        if (!hasParam2()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
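+      // Note on the stream-merge above: when parsing fails midway, the
+      // partially decoded message is still merged in (via
+      // e.getUnfinishedMessage()) before the exception is rethrown, so
+      // fields read before the failure are preserved in this builder.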
+      private int bitField0_;
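+      // bitField0_ packs one presence bit per field: bit 0 tracks param1
+      // and bit 1 tracks param2. The has*() accessors test the bit, the
+      // setters raise it, and the clear*() methods drop it while
+      // restoring the field's default value.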
+
+      // required int32 param1 = 1;
+      private int param1_ ;
+      /**
+       * <code>required int32 param1 = 1;</code>
+       */
+      public boolean hasParam1() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int32 param1 = 1;</code>
+       */
+      public int getParam1() {
+        return param1_;
+      }
+      /**
+       * <code>required int32 param1 = 1;</code>
+       */
+      public Builder setParam1(int value) {
+        bitField0_ |= 0x00000001;
+        param1_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 param1 = 1;</code>
+       */
+      public Builder clearParam1() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        param1_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // required int32 param2 = 2;
+      private int param2_ ;
+      /**
+       * <code>required int32 param2 = 2;</code>
+       */
+      public boolean hasParam2() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>required int32 param2 = 2;</code>
+       */
+      public int getParam2() {
+        return param2_;
+      }
+      /**
+       * <code>required int32 param2 = 2;</code>
+       */
+      public Builder setParam2(int value) {
+        bitField0_ |= 0x00000002;
+        param2_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 param2 = 2;</code>
+       */
+      public Builder clearParam2() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        param2_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.AddRequestProto)
+    }
+
+    static {
+      defaultInstance = new AddRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.AddRequestProto)
+  }
+
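+  // Illustrative usage (editorial sketch, not generated output): the
+  // message follows the standard protobuf 2.5 builder pattern, e.g.
+  //
+  //   TestProtosLegacy.AddRequestProto req =
+  //       TestProtosLegacy.AddRequestProto.newBuilder()
+  //           .setParam1(1)
+  //           .setParam2(2)
+  //           .build();
+  //   byte[] bytes = req.toByteArray();
+  //   TestProtosLegacy.AddRequestProto parsed =
+  //       TestProtosLegacy.AddRequestProto.parseFrom(bytes);
+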
+  public interface AddRequestProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated int32 params = 1;
+    /**
+     * <code>repeated int32 params = 1;</code>
+     */
+    java.util.List<java.lang.Integer> getParamsList();
+    /**
+     * <code>repeated int32 params = 1;</code>
+     */
+    int getParamsCount();
+    /**
+     * <code>repeated int32 params = 1;</code>
+     */
+    int getParams(int index);
+  }
+  /**
+   * Protobuf type {@code hadoop.common.AddRequestProto2}
+   */
+  public static final class AddRequestProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements AddRequestProto2OrBuilder {
+    // Use AddRequestProto2.newBuilder() to construct.
+    private AddRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private AddRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final AddRequestProto2 defaultInstance;
+    public static AddRequestProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public AddRequestProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AddRequestProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                params_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              params_.add(input.readInt32());
+              break;
+            }
+            case 10: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
+                params_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                params_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          params_ = java.util.Collections.unmodifiableList(params_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
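+    // In the stream constructor above, tag 8 is field 1 as an unpacked
+    // varint ((1 << 3) | 0) and tag 10 is field 1 as a packed,
+    // length-delimited block ((1 << 3) | 2); handling both cases lets
+    // the parser accept either encoding of "repeated int32 params".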
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AddRequestProto2> PARSER =
+        new com.google.protobuf.AbstractParser<AddRequestProto2>() {
+      public AddRequestProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AddRequestProto2(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AddRequestProto2> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated int32 params = 1;
+    public static final int PARAMS_FIELD_NUMBER = 1;
+    private java.util.List<java.lang.Integer> params_;
+    /**
+     * <code>repeated int32 params = 1;</code>
+     */
+    public java.util.List<java.lang.Integer>
+        getParamsList() {
+      return params_;
+    }
+    /**
+     * <code>repeated int32 params = 1;</code>
+     */
+    public int getParamsCount() {
+      return params_.size();
+    }
+    /**
+     * <code>repeated int32 params = 1;</code>
+     */
+    public int getParams(int index) {
+      return params_.get(index);
+    }
+
+    private void initFields() {
+      params_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < params_.size(); i++) {
+        output.writeInt32(1, params_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < params_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(params_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getParamsList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
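+    // Size accounting above: dataSize sums the varint payload bytes,
+    // and "1 * getParamsList().size()" adds one tag byte (0x08) per
+    // element, since the unpacked encoding repeats the tag for every
+    // value.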
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2) obj;
+
+      boolean result = true;
+      result = result && getParamsList()
+          .equals(other.getParamsList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getParamsCount() > 0) {
+        hash = (37 * hash) + PARAMS_FIELD_NUMBER;
+        hash = (53 * hash) + getParamsList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.AddRequestProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        params_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          params_ = java.util.Collections.unmodifiableList(params_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.params_ = params_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance()) return this;
+        if (!other.params_.isEmpty()) {
+          if (params_.isEmpty()) {
+            params_ = other.params_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureParamsIsMutable();
+            params_.addAll(other.params_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated int32 params = 1;
+      private java.util.List<java.lang.Integer> params_ = java.util.Collections.emptyList();
+      private void ensureParamsIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          params_ = new java.util.ArrayList<java.lang.Integer>(params_);
+          bitField0_ |= 0x00000001;
+        }
+      }
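+      // Copy-on-write: the builder starts with an immutable emptyList()
+      // and only copies into a fresh ArrayList on the first mutation;
+      // getParamsList() below returns an unmodifiable view of it.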
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public java.util.List<java.lang.Integer>
+          getParamsList() {
+        return java.util.Collections.unmodifiableList(params_);
+      }
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public int getParamsCount() {
+        return params_.size();
+      }
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public int getParams(int index) {
+        return params_.get(index);
+      }
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public Builder setParams(
+          int index, int value) {
+        ensureParamsIsMutable();
+        params_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public Builder addParams(int value) {
+        ensureParamsIsMutable();
+        params_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public Builder addAllParams(
+          java.lang.Iterable<? extends java.lang.Integer> values) {
+        ensureParamsIsMutable();
+        super.addAll(values, params_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 params = 1;</code>
+       */
+      public Builder clearParams() {
+        params_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.AddRequestProto2)
+    }
+
+    static {
+      defaultInstance = new AddRequestProto2(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.AddRequestProto2)
+  }
+
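+  // Illustrative usage (editorial sketch) of the repeated-field builder:
+  //
+  //   TestProtosLegacy.AddRequestProto2 req =
+  //       TestProtosLegacy.AddRequestProto2.newBuilder()
+  //           .addParams(1)
+  //           .addAllParams(java.util.Arrays.asList(2, 3))
+  //           .build();
+  //   // req.getParamsList() => [1, 2, 3]
+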
+  public interface AddResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 result = 1;
+    /**
+     * <code>required int32 result = 1;</code>
+     */
+    boolean hasResult();
+    /**
+     * <code>required int32 result = 1;</code>
+     */
+    int getResult();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.AddResponseProto}
+   */
+  public static final class AddResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements AddResponseProtoOrBuilder {
+    // Use AddResponseProto.newBuilder() to construct.
+    private AddResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private AddResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final AddResponseProto defaultInstance;
+    public static AddResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public AddResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AddResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              result_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AddResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<AddResponseProto>() {
+      public AddResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AddResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AddResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 result = 1;
+    public static final int RESULT_FIELD_NUMBER = 1;
+    private int result_;
+    /**
+     * <code>required int32 result = 1;</code>
+     */
+    public boolean hasResult() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 result = 1;</code>
+     */
+    public int getResult() {
+      return result_;
+    }
+
+    private void initFields() {
+      result_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasResult()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, result_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, result_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasResult() == other.hasResult());
+      if (hasResult()) {
+        result = result && (getResult()
+            == other.getResult());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasResult()) {
+        hash = (37 * hash) + RESULT_FIELD_NUMBER;
+        hash = (53 * hash) + getResult();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.AddResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        result_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.result_ = result_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()) return this;
+        if (other.hasResult()) {
+          setResult(other.getResult());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasResult()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required int32 result = 1;
+      private int result_ ;
+      /**
+       * <code>required int32 result = 1;</code>
+       */
+      public boolean hasResult() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int32 result = 1;</code>
+       */
+      public int getResult() {
+        return result_;
+      }
+      /**
+       * <code>required int32 result = 1;</code>
+       */
+      public Builder setResult(int value) {
+        bitField0_ |= 0x00000001;
+        result_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 result = 1;</code>
+       */
+      public Builder clearResult() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        result_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.AddResponseProto)
+    }
+
+    static {
+      defaultInstance = new AddResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.AddResponseProto)
+  }
+
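+  // Illustrative usage (editorial sketch): "result" is a required field,
+  // so build() throws an UninitializedMessageException when it is unset:
+  //
+  //   TestProtosLegacy.AddResponseProto ok =
+  //       TestProtosLegacy.AddResponseProto.newBuilder()
+  //           .setResult(3)
+  //           .build();
+  //   TestProtosLegacy.AddResponseProto.newBuilder().build(); // throws
+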
+  public interface ExchangeRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated int32 values = 1;
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    java.util.List<java.lang.Integer> getValuesList();
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    int getValuesCount();
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    int getValues(int index);
+  }
+  /**
+   * Protobuf type {@code hadoop.common.ExchangeRequestProto}
+   */
+  public static final class ExchangeRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements ExchangeRequestProtoOrBuilder {
+    // Use ExchangeRequestProto.newBuilder() to construct.
+    private ExchangeRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private ExchangeRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final ExchangeRequestProto defaultInstance;
+    public static ExchangeRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public ExchangeRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ExchangeRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                values_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              values_.add(input.readInt32());
+              break;
+            }
+            case 10: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
+                values_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                values_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = java.util.Collections.unmodifiableList(values_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ExchangeRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<ExchangeRequestProto>() {
+      public ExchangeRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ExchangeRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ExchangeRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated int32 values = 1;
+    public static final int VALUES_FIELD_NUMBER = 1;
+    private java.util.List<java.lang.Integer> values_;
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    public java.util.List<java.lang.Integer>
+        getValuesList() {
+      return values_;
+    }
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    public int getValuesCount() {
+      return values_.size();
+    }
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    public int getValues(int index) {
+      return values_.get(index);
+    }
+
+    private void initFields() {
+      values_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < values_.size(); i++) {
+        output.writeInt32(1, values_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < values_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(values_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getValuesList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto) obj;
+
+      boolean result = true;
+      result = result && getValuesList()
+          .equals(other.getValuesList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getValuesCount() > 0) {
+        hash = (37 * hash) + VALUES_FIELD_NUMBER;
+        hash = (53 * hash) + getValuesList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.ExchangeRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        values_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = java.util.Collections.unmodifiableList(values_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.values_ = values_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance()) return this;
+        if (!other.values_.isEmpty()) {
+          if (values_.isEmpty()) {
+            values_ = other.values_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureValuesIsMutable();
+            values_.addAll(other.values_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated int32 values = 1;
+      private java.util.List<java.lang.Integer> values_ = java.util.Collections.emptyList();
+      private void ensureValuesIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = new java.util.ArrayList<java.lang.Integer>(values_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public java.util.List<java.lang.Integer>
+          getValuesList() {
+        return java.util.Collections.unmodifiableList(values_);
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public int getValuesCount() {
+        return values_.size();
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public int getValues(int index) {
+        return values_.get(index);
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder setValues(
+          int index, int value) {
+        ensureValuesIsMutable();
+        values_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder addValues(int value) {
+        ensureValuesIsMutable();
+        values_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder addAllValues(
+          java.lang.Iterable<? extends java.lang.Integer> values) {
+        ensureValuesIsMutable();
+        super.addAll(values, values_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder clearValues() {
+        values_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeRequestProto)
+    }
+
+    static {
+      defaultInstance = new ExchangeRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.ExchangeRequestProto)
+  }
+
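The generated ExchangeRequestProto above follows the stock protobuf-java 2.5.x pattern: the message itself is immutable, and instances are created through the nested Builder. A minimal round-trip sketch (illustration only, not part of this change; assumes protobuf-java 2.5.x on the classpath):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto;

    static void roundTrip() throws InvalidProtocolBufferException {
      // Build an immutable message through the generated Builder.
      ExchangeRequestProto request = ExchangeRequestProto.newBuilder()
          .addValues(1).addValues(2).addValues(3)
          .build();
      // Serialize and re-parse; the repeated int32 field survives intact.
      ExchangeRequestProto parsed =
          ExchangeRequestProto.parseFrom(request.toByteArray());
      assert parsed.getValuesList().equals(request.getValuesList());
    }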
+  public interface ExchangeResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated int32 values = 1;
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    java.util.List<java.lang.Integer> getValuesList();
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    int getValuesCount();
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    int getValues(int index);
+  }
+  /**
+   * Protobuf type {@code hadoop.common.ExchangeResponseProto}
+   */
+  public static final class ExchangeResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements ExchangeResponseProtoOrBuilder {
+    // Use ExchangeResponseProto.newBuilder() to construct.
+    private ExchangeResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private ExchangeResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final ExchangeResponseProto defaultInstance;
+    public static ExchangeResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public ExchangeResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ExchangeResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                values_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              values_.add(input.readInt32());
+              break;
+            }
+            case 10: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
+                values_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                values_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = java.util.Collections.unmodifiableList(values_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ExchangeResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<ExchangeResponseProto>() {
+      public ExchangeResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ExchangeResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ExchangeResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated int32 values = 1;
+    public static final int VALUES_FIELD_NUMBER = 1;
+    private java.util.List<java.lang.Integer> values_;
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    public java.util.List<java.lang.Integer>
+        getValuesList() {
+      return values_;
+    }
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    public int getValuesCount() {
+      return values_.size();
+    }
+    /**
+     * <code>repeated int32 values = 1;</code>
+     */
+    public int getValues(int index) {
+      return values_.get(index);
+    }
+
+    private void initFields() {
+      values_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < values_.size(); i++) {
+        output.writeInt32(1, values_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < values_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(values_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getValuesList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) obj;
+
+      boolean result = true;
+      result = result && getValuesList()
+          .equals(other.getValuesList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getValuesCount() > 0) {
+        hash = (37 * hash) + VALUES_FIELD_NUMBER;
+        hash = (53 * hash) + getValuesList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.ExchangeResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        values_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = java.util.Collections.unmodifiableList(values_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.values_ = values_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance()) return this;
+        if (!other.values_.isEmpty()) {
+          if (values_.isEmpty()) {
+            values_ = other.values_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureValuesIsMutable();
+            values_.addAll(other.values_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated int32 values = 1;
+      private java.util.List<java.lang.Integer> values_ = java.util.Collections.emptyList();
+      private void ensureValuesIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = new java.util.ArrayList<java.lang.Integer>(values_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public java.util.List<java.lang.Integer>
+          getValuesList() {
+        return java.util.Collections.unmodifiableList(values_);
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public int getValuesCount() {
+        return values_.size();
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public int getValues(int index) {
+        return values_.get(index);
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder setValues(
+          int index, int value) {
+        ensureValuesIsMutable();
+        values_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder addValues(int value) {
+        ensureValuesIsMutable();
+        values_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder addAllValues(
+          java.lang.Iterable<? extends java.lang.Integer> values) {
+        ensureValuesIsMutable();
+        super.addAll(values, values_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder clearValues() {
+        values_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeResponseProto)
+    }
+
+    static {
+      defaultInstance = new ExchangeResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.ExchangeResponseProto)
+  }
+
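Note the two value-bearing tag cases in the ExchangeResponseProto stream constructor above: case 8 decodes the unpacked encoding of the repeated int32 field (one tag/varint pair per element), while case 10 accepts the packed, length-delimited encoding, so either wire form parses to the same list. A sketch of both encodings (illustration only, not part of this change; assumes protobuf-java 2.5.x):

    import org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto;

    static void parseBothEncodings() throws Exception {
      // Unpacked: tag 0x08 (field 1, wire type varint) before each element
      // -- handled by "case 8" in the constructor.
      byte[] unpacked = {0x08, 0x01, 0x08, 0x02, 0x08, 0x03};
      // Packed: tag 0x0A (field 1, wire type length-delimited), a byte count,
      // then the varints back to back -- handled by "case 10".
      byte[] packed = {0x0A, 0x03, 0x01, 0x02, 0x03};
      // Both decode to values = [1, 2, 3].
      System.out.println(ExchangeResponseProto.parseFrom(unpacked).getValuesList());
      System.out.println(ExchangeResponseProto.parseFrom(packed).getValuesList());
    }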
+  public interface AuthMethodResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 code = 1;
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    boolean hasCode();
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    int getCode();
+
+    // required string mechanismName = 2;
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    boolean hasMechanismName();
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    java.lang.String getMechanismName();
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    com.google.protobuf.ByteString
+        getMechanismNameBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.AuthMethodResponseProto}
+   */
+  public static final class AuthMethodResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements AuthMethodResponseProtoOrBuilder {
+    // Use AuthMethodResponseProto.newBuilder() to construct.
+    private AuthMethodResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private AuthMethodResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final AuthMethodResponseProto defaultInstance;
+    public static AuthMethodResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public AuthMethodResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AuthMethodResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              code_ = input.readInt32();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              mechanismName_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AuthMethodResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<AuthMethodResponseProto>() {
+      public AuthMethodResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AuthMethodResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AuthMethodResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 code = 1;
+    public static final int CODE_FIELD_NUMBER = 1;
+    private int code_;
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    public boolean hasCode() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    public int getCode() {
+      return code_;
+    }
+
+    // required string mechanismName = 2;
+    public static final int MECHANISMNAME_FIELD_NUMBER = 2;
+    private java.lang.Object mechanismName_;
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    public boolean hasMechanismName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    public java.lang.String getMechanismName() {
+      java.lang.Object ref = mechanismName_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          mechanismName_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMechanismNameBytes() {
+      java.lang.Object ref = mechanismName_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        mechanismName_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      code_ = 0;
+      mechanismName_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasCode()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasMechanismName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, code_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getMechanismNameBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, code_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getMechanismNameBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasCode() == other.hasCode());
+      if (hasCode()) {
+        result = result && (getCode()
+            == other.getCode());
+      }
+      result = result && (hasMechanismName() == other.hasMechanismName());
+      if (hasMechanismName()) {
+        result = result && getMechanismName()
+            .equals(other.getMechanismName());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasCode()) {
+        hash = (37 * hash) + CODE_FIELD_NUMBER;
+        hash = (53 * hash) + getCode();
+      }
+      if (hasMechanismName()) {
+        hash = (37 * hash) + MECHANISMNAME_FIELD_NUMBER;
+        hash = (53 * hash) + getMechanismName().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.AuthMethodResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        code_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        mechanismName_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.code_ = code_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.mechanismName_ = mechanismName_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance()) return this;
+        if (other.hasCode()) {
+          setCode(other.getCode());
+        }
+        if (other.hasMechanismName()) {
+          bitField0_ |= 0x00000002;
+          mechanismName_ = other.mechanismName_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasCode()) {
+          return false;
+        }
+        if (!hasMechanismName()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required int32 code = 1;
+      private int code_;
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public boolean hasCode() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public int getCode() {
+        return code_;
+      }
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public Builder setCode(int value) {
+        bitField0_ |= 0x00000001;
+        code_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public Builder clearCode() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        code_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // required string mechanismName = 2;
+      private java.lang.Object mechanismName_ = "";
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public boolean hasMechanismName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public java.lang.String getMechanismName() {
+        java.lang.Object ref = mechanismName_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          mechanismName_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMechanismNameBytes() {
+        java.lang.Object ref = mechanismName_;
+        if (ref instanceof java.lang.String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          mechanismName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public Builder setMechanismName(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        mechanismName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public Builder clearMechanismName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        mechanismName_ = getDefaultInstance().getMechanismName();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public Builder setMechanismNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        mechanismName_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.AuthMethodResponseProto)
+    }
+
+    static {
+      defaultInstance = new AuthMethodResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.AuthMethodResponseProto)
+  }
+
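Unlike the two Exchange messages, AuthMethodResponseProto declares required fields, so the generated initialization checks above have teeth: build() throws UninitializedMessageException while code or mechanismName is unset, whereas buildPartial() skips the check. A sketch (illustration only, not part of this change; the field values are placeholders):

    import org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto;

    static void requiredFieldChecks() {
      // Both required fields set: isInitialized() is true and build() succeeds.
      AuthMethodResponseProto ok = AuthMethodResponseProto.newBuilder()
          .setCode(0)                    // placeholder code
          .setMechanismName("SIMPLE")    // placeholder mechanism name
          .build();
      assert ok.isInitialized();
      // mechanismName left unset: isInitialized() is false, and build() would
      // throw UninitializedMessageException; buildPartial() would not.
      AuthMethodResponseProto.Builder partial =
          AuthMethodResponseProto.newBuilder().setCode(0);
      assert !partial.isInitialized();
    }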
+  public interface UserResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string user = 1;
+    /**
+     * <code>required string user = 1;</code>
+     */
+    boolean hasUser();
+    /**
+     * <code>required string user = 1;</code>
+     */
+    java.lang.String getUser();
+    /**
+     * <code>required string user = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getUserBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.UserResponseProto}
+   */
+  public static final class UserResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements UserResponseProtoOrBuilder {
+    // Use UserResponseProto.newBuilder() to construct.
+    private UserResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private UserResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final UserResponseProto defaultInstance;
+    public static UserResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public UserResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UserResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              user_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UserResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<UserResponseProto>() {
+      public UserResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UserResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UserResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string user = 1;
+    public static final int USER_FIELD_NUMBER = 1;
+    private java.lang.Object user_;
+    /**
+     * <code>required string user = 1;</code>
+     */
+    public boolean hasUser() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string user = 1;</code>
+     */
+    public java.lang.String getUser() {
+      java.lang.Object ref = user_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          user_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string user = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getUserBytes() {
+      java.lang.Object ref = user_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        user_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      user_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasUser()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getUserBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getUserBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasUser() == other.hasUser());
+      if (hasUser()) {
+        result = result && getUser()
+            .equals(other.getUser());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasUser()) {
+        hash = (37 * hash) + USER_FIELD_NUMBER;
+        hash = (53 * hash) + getUser().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.UserResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
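+      // maybeForceBuilderInitialization() is empty here: alwaysUseFieldBuilders
+      // is a runtime test hook, and this message has no sub-message fields
+      // that would require nested field builders.
+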
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        user_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.user_ = user_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()) return this;
+        if (other.hasUser()) {
+          bitField0_ |= 0x00000001;
+          user_ = other.user_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasUser()) {
+          return false;
+        }
+        return true;
+      }
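+      // 'user' is a required proto2 field, so isInitialized() stays false
+      // until setUser()/setUserBytes() is called; build() above converts
+      // that into an UninitializedMessageException instead of emitting an
+      // incomplete message.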
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string user = 1;
+      private java.lang.Object user_ = "";
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public boolean hasUser() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public java.lang.String getUser() {
+        java.lang.Object ref = user_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          user_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getUserBytes() {
+        java.lang.Object ref = user_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          user_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public Builder setUser(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        user_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public Builder clearUser() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        user_ = getDefaultInstance().getUser();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public Builder setUserBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        user_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.UserResponseProto)
+    }
+
+    static {
+      defaultInstance = new UserResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.UserResponseProto)
+  }
+
+  public interface SleepRequestProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional int64 sleep_time = 1;
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    boolean hasSleepTime();
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    long getSleepTime();
+  }
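+  // For a proto2 optional scalar such as sleep_time, the generated pair
+  // above distinguishes explicit presence from the default: hasSleepTime()
+  // reports whether the field was set on the wire, while getSleepTime()
+  // returns the type default (0L) when it was not.
+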
+  /**
+   * Protobuf type {@code hadoop.common.SleepRequestProto2}
+   */
+  public static final class SleepRequestProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepRequestProto2OrBuilder {
+    // Use SleepRequestProto2.newBuilder() to construct.
+    private SleepRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepRequestProto2 defaultInstance;
+    public static SleepRequestProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepRequestProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepRequestProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              sleepTime_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
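+    // The parse loop above switches on raw wire tags, where
+    // tag = (field_number << 3) | wire_type: tag 0 marks end of input, and
+    // tag 8 is field 1 as a varint (wire type 0), i.e. sleep_time. Any
+    // unrecognized tag is kept via parseUnknownField() so unknown fields
+    // survive a re-serialization round trip.
+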
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepRequestProto2> PARSER =
+        new com.google.protobuf.AbstractParser<SleepRequestProto2>() {
+      public SleepRequestProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepRequestProto2(input, extensionRegistry);
+      }
+    };
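+    // Every static parseFrom()/parseDelimitedFrom() overload below delegates
+    // to this single PARSER instance, which in turn re-invokes the
+    // stream-parsing constructor above.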
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepRequestProto2> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional int64 sleep_time = 1;
+    public static final int SLEEP_TIME_FIELD_NUMBER = 1;
+    private long sleepTime_;
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    public boolean hasSleepTime() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    public long getSleepTime() {
+      return sleepTime_;
+    }
+
+    private void initFields() {
+      sleepTime_ = 0L;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt64(1, sleepTime_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(1, sleepTime_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) obj;
+
+      boolean result = true;
+      result = result && (hasSleepTime() == other.hasSleepTime());
+      if (hasSleepTime()) {
+        result = result && (getSleepTime()
+            == other.getSleepTime());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasSleepTime()) {
+        hash = (37 * hash) + SLEEP_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getSleepTime());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SleepRequestProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        sleepTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.sleepTime_ = sleepTime_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance()) return this;
+        if (other.hasSleepTime()) {
+          setSleepTime(other.getSleepTime());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional int64 sleep_time = 1;
+      private long sleepTime_;
+      /**
+       * <code>optional int64 sleep_time = 1;</code>
+       */
+      public boolean hasSleepTime() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional int64 sleep_time = 1;</code>
+       */
+      public long getSleepTime() {
+        return sleepTime_;
+      }
+      /**
+       * <code>optional int64 sleep_time = 1;</code>
+       */
+      public Builder setSleepTime(long value) {
+        bitField0_ |= 0x00000001;
+        sleepTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 sleep_time = 1;</code>
+       */
+      public Builder clearSleepTime() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        sleepTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto2)
+    }
+
+    static {
+      defaultInstance = new SleepRequestProto2(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto2)
+  }
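+  // Illustrative sketch, not protoc output; assumes only the generated APIs
+  // above plus toByteArray() from the protobuf-java 2.5 runtime:
+  //
+  //   SleepRequestProto2 req = SleepRequestProto2.newBuilder()
+  //       .setSleepTime(1000L)
+  //       .build();
+  //   byte[] wire = req.toByteArray();
+  //   SleepRequestProto2 copy = SleepRequestProto2.parseFrom(wire);
+  //   assert copy.hasSleepTime() && copy.getSleepTime() == 1000L;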
+
+  public interface SleepResponseProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional int64 receive_time = 1;
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    boolean hasReceiveTime();
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    long getReceiveTime();
+
+    // optional int64 response_time = 2;
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    boolean hasResponseTime();
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    long getResponseTime();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SleepResponseProto2}
+   */
+  public static final class SleepResponseProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepResponseProto2OrBuilder {
+    // Use SleepResponseProto2.newBuilder() to construct.
+    private SleepResponseProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepResponseProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepResponseProto2 defaultInstance;
+    public static SleepResponseProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepResponseProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepResponseProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              receiveTime_ = input.readInt64();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              responseTime_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepResponseProto2> PARSER =
+        new com.google.protobuf.AbstractParser<SleepResponseProto2>() {
+      public SleepResponseProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepResponseProto2(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepResponseProto2> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional int64 receive_time = 1;
+    public static final int RECEIVE_TIME_FIELD_NUMBER = 1;
+    private long receiveTime_;
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    public boolean hasReceiveTime() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    public long getReceiveTime() {
+      return receiveTime_;
+    }
+
+    // optional int64 response_time = 2;
+    public static final int RESPONSE_TIME_FIELD_NUMBER = 2;
+    private long responseTime_;
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    public boolean hasResponseTime() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    public long getResponseTime() {
+      return responseTime_;
+    }
+
+    private void initFields() {
+      receiveTime_ = 0L;
+      responseTime_ = 0L;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt64(1, receiveTime_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeInt64(2, responseTime_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(1, receiveTime_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(2, responseTime_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) obj;
+
+      boolean result = true;
+      result = result && (hasReceiveTime() == other.hasReceiveTime());
+      if (hasReceiveTime()) {
+        result = result && (getReceiveTime()
+            == other.getReceiveTime());
+      }
+      result = result && (hasResponseTime() == other.hasResponseTime());
+      if (hasResponseTime()) {
+        result = result && (getResponseTime()
+            == other.getResponseTime());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasReceiveTime()) {
+        hash = (37 * hash) + RECEIVE_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getReceiveTime());
+      }
+      if (hasResponseTime()) {
+        hash = (37 * hash) + RESPONSE_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getResponseTime());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SleepResponseProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        receiveTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        responseTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.receiveTime_ = receiveTime_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.responseTime_ = responseTime_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance()) return this;
+        if (other.hasReceiveTime()) {
+          setReceiveTime(other.getReceiveTime());
+        }
+        if (other.hasResponseTime()) {
+          setResponseTime(other.getResponseTime());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional int64 receive_time = 1;
+      private long receiveTime_;
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public boolean hasReceiveTime() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public long getReceiveTime() {
+        return receiveTime_;
+      }
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public Builder setReceiveTime(long value) {
+        bitField0_ |= 0x00000001;
+        receiveTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public Builder clearReceiveTime() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        receiveTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional int64 response_time = 2;
+      private long responseTime_;
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public boolean hasResponseTime() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public long getResponseTime() {
+        return responseTime_;
+      }
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public Builder setResponseTime(long value) {
+        bitField0_ |= 0x00000002;
+        responseTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public Builder clearResponseTime() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        responseTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto2)
+    }
+
+    static {
+      defaultInstance = new SleepResponseProto2(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto2)
+  }
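+  // Illustrative sketch, not protoc output: toBuilder() copies an immutable
+  // message back into a mutable Builder, so one field can be rewritten
+  // without rebuilding the rest.
+  //
+  //   SleepResponseProto2 resp = SleepResponseProto2.newBuilder()
+  //       .setReceiveTime(1L)
+  //       .setResponseTime(2L)
+  //       .build();
+  //   SleepResponseProto2 updated =
+  //       resp.toBuilder().setResponseTime(3L).build();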
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EmptyRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EmptyResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_OptRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_OptRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_OptResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_OptResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoRequestProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoResponseProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AddRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AddRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AddRequestProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AddResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AddResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_ExchangeRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_UserResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_UserResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepRequestProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepResponseProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\021test_legacy.proto\022\rhadoop.common\"\023\n\021Em" +
+      "ptyRequestProto\"\024\n\022EmptyResponseProto\"#\n" +
+      "\020EchoRequestProto\022\017\n\007message\030\001 \002(\t\"$\n\021Ec" +
+      "hoResponseProto\022\017\n\007message\030\001 \002(\t\"\"\n\017OptR" +
+      "equestProto\022\017\n\007message\030\001 \001(\t\"#\n\020OptRespo" +
+      "nseProto\022\017\n\007message\030\001 \001(\t\")\n\021SleepReques" +
+      "tProto\022\024\n\014milliSeconds\030\001 \002(\005\"\024\n\022SleepRes" +
+      "ponseProto\"*\n\024SlowPingRequestProto\022\022\n\nsh" +
+      "ouldSlow\030\001 \002(\010\"$\n\021EchoRequestProto2\022\017\n\007m" +
+      "essage\030\001 \003(\t\"%\n\022EchoResponseProto2\022\017\n\007me",
+      "ssage\030\001 \003(\t\"1\n\017AddRequestProto\022\016\n\006param1" +
+      "\030\001 \002(\005\022\016\n\006param2\030\002 \002(\005\"\"\n\020AddRequestProt" +
+      "o2\022\016\n\006params\030\001 \003(\005\"\"\n\020AddResponseProto\022\016" +
+      "\n\006result\030\001 \002(\005\"&\n\024ExchangeRequestProto\022\016" +
+      "\n\006values\030\001 \003(\005\"\'\n\025ExchangeResponseProto\022" +
+      "\016\n\006values\030\001 \003(\005\">\n\027AuthMethodResponsePro" +
+      "to\022\014\n\004code\030\001 \002(\005\022\025\n\rmechanismName\030\002 \002(\t\"" +
+      "!\n\021UserResponseProto\022\014\n\004user\030\001 \002(\t\"(\n\022Sl" +
+      "eepRequestProto2\022\022\n\nsleep_time\030\001 \001(\003\"B\n\023" +
+      "SleepResponseProto2\022\024\n\014receive_time\030\001 \001(",
+      "\003\022\025\n\rresponse_time\030\002 \001(\003B5\n\036org.apache.h" +
+      "adoop.ipc.protobufB\020TestProtosLegacy\240\001\001"
+    };
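+    // descriptorData is the protoc-serialized FileDescriptorProto for
+    // test_legacy.proto, escaped into Java string literals and split into
+    // concatenated chunks to stay under the JVM's 64KB limit on a single
+    // string constant; the assigner below wires each message descriptor to
+    // its reflection-based field accessor table.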
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_hadoop_common_EmptyRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EmptyRequestProto_descriptor,
+              new java.lang.String[] { });
+          internal_static_hadoop_common_EmptyResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EmptyResponseProto_descriptor,
+              new java.lang.String[] { });
+          internal_static_hadoop_common_EchoRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(2);
+          internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoRequestProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_EchoResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(3);
+          internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoResponseProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_OptRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(4);
+          internal_static_hadoop_common_OptRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_OptRequestProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_OptResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(5);
+          internal_static_hadoop_common_OptResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_OptResponseProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_SleepRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(6);
+          internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepRequestProto_descriptor,
+              new java.lang.String[] { "MilliSeconds", });
+          internal_static_hadoop_common_SleepResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(7);
+          internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepResponseProto_descriptor,
+              new java.lang.String[] { });
+          internal_static_hadoop_common_SlowPingRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(8);
+          internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SlowPingRequestProto_descriptor,
+              new java.lang.String[] { "ShouldSlow", });
+          internal_static_hadoop_common_EchoRequestProto2_descriptor =
+            getDescriptor().getMessageTypes().get(9);
+          internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoRequestProto2_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_EchoResponseProto2_descriptor =
+            getDescriptor().getMessageTypes().get(10);
+          internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoResponseProto2_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_AddRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(11);
+          internal_static_hadoop_common_AddRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AddRequestProto_descriptor,
+              new java.lang.String[] { "Param1", "Param2", });
+          internal_static_hadoop_common_AddRequestProto2_descriptor =
+            getDescriptor().getMessageTypes().get(12);
+          internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AddRequestProto2_descriptor,
+              new java.lang.String[] { "Params", });
+          internal_static_hadoop_common_AddResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(13);
+          internal_static_hadoop_common_AddResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AddResponseProto_descriptor,
+              new java.lang.String[] { "Result", });
+          internal_static_hadoop_common_ExchangeRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(14);
+          internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_ExchangeRequestProto_descriptor,
+              new java.lang.String[] { "Values", });
+          internal_static_hadoop_common_ExchangeResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(15);
+          internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_ExchangeResponseProto_descriptor,
+              new java.lang.String[] { "Values", });
+          internal_static_hadoop_common_AuthMethodResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(16);
+          internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AuthMethodResponseProto_descriptor,
+              new java.lang.String[] { "Code", "MechanismName", });
+          internal_static_hadoop_common_UserResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(17);
+          internal_static_hadoop_common_UserResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_UserResponseProto_descriptor,
+              new java.lang.String[] { "User", });
+          internal_static_hadoop_common_SleepRequestProto2_descriptor =
+            getDescriptor().getMessageTypes().get(18);
+          internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepRequestProto2_descriptor,
+              new java.lang.String[] { "SleepTime", });
+          internal_static_hadoop_common_SleepResponseProto2_descriptor =
+            getDescriptor().getMessageTypes().get(19);
+          internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepResponseProto2_descriptor,
+              new java.lang.String[] { "ReceiveTime", "ResponseTime", });
+          return null;
+        }
+      };
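+    // Build the FileDescriptor from the serialized descriptor data above;
+    // the assigner callback then binds each message type's descriptor to
+    // its FieldAccessorTable so fields can be read and set reflectively.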
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}

+ 3313 - 0
hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtosLegacy.java

@@ -0,0 +1,3313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// This class is added to the source tree because no protoc 2.5.0 executable
+// is available for ARM to generate the same code.
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: test_rpc_service_legacy.proto
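+//
+// Usage sketch (an illustration and an assumption, not generated output):
+// tests typically wrap a BlockingInterface implementation via
+//   com.google.protobuf.BlockingService service =
+//       TestRpcServiceProtosLegacy.TestProtobufRpcProto
+//           .newReflectiveBlockingService(impl);
+// and register it with an RPC server, which then dispatches each call
+// through callBlockingMethod() by method index.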
+
+package org.apache.hadoop.ipc.protobuf;
+
+public final class TestRpcServiceProtosLegacy {
+  private TestRpcServiceProtosLegacy() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  /**
+   * Protobuf service {@code hadoop.common.TestProtobufRpcProto}
+   *
+   * <pre>
+   **
+   * A protobuf service for use in tests
+   * </pre>
+   */
+  public static abstract class TestProtobufRpcProto
+      implements com.google.protobuf.Service {
+    protected TestProtobufRpcProto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
+       */
+      public abstract void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);
+
+      /**
+       * <code>rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void error(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void error2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void slowPing(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);</code>
+       */
+      public abstract void echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done);
+
+      /**
+       * <code>rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);</code>
+       */
+      public abstract void add(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);
+
+      /**
+       * <code>rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);</code>
+       */
+      public abstract void add2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);
+
+      /**
+       * <code>rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void testServerGet(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);</code>
+       */
+      public abstract void exchange(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done);
+
+      /**
+       * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void lockAndSleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);</code>
+       */
+      public abstract void getAuthMethod(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done);
+
+      /**
+       * <code>rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
+       */
+      public abstract void getAuthUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);
+
+      /**
+       * <code>rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
+       */
+      public abstract void echoPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);
+
+      /**
+       * <code>rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void sendPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
+       */
+      public abstract void getCurrentUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);
+
+      /**
+       * <code>rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
+       */
+      public abstract void getServerRemoteUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new TestProtobufRpcProto() {
+        @java.lang.Override
+        public  void ping(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.ping(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echo(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
+          impl.echo(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void error(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.error(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void error2(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.error2(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void slowPing(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.slowPing(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echo2(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done) {
+          impl.echo2(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void add(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
+          impl.add(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void add2(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
+          impl.add2(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void testServerGet(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.testServerGet(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void exchange(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done) {
+          impl.exchange(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void sleep(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.sleep(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void lockAndSleep(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.lockAndSleep(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void getAuthMethod(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done) {
+          impl.getAuthMethod(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void getAuthUser(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
+          impl.getAuthUser(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echoPostponed(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
+          impl.echoPostponed(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void sendPostponed(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.sendPostponed(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void getCurrentUser(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
+          impl.getCurrentUser(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void getServerRemoteUser(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
+          impl.getServerRemoteUser(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
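+          // Method indices follow the order of the rpc declarations in
+          // test_rpc_service_legacy.proto.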
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 1:
+              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
+            case 2:
+              return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 3:
+              return impl.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 4:
+              return impl.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)request);
+            case 5:
+              return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)request);
+            case 6:
+              return impl.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)request);
+            case 7:
+              return impl.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)request);
+            case 8:
+              return impl.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 9:
+              return impl.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)request);
+            case 10:
+              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
+            case 11:
+              return impl.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
+            case 12:
+              return impl.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 13:
+              return impl.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 14:
+              return impl.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
+            case 15:
+              return impl.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 16:
+              return impl.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 17:
+              return impl.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 3:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 4:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
+            case 5:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
+            case 6:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
+            case 7:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
+            case 8:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 9:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
+            case 10:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+            case 11:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+            case 12:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 13:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 14:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+            case 15:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 16:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 17:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 3:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 4:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 5:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
+            case 6:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+            case 7:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+            case 8:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 9:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
+            case 10:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 11:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 12:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
+            case 13:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+            case 14:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+            case 15:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 16:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+            case 17:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void ping(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
+     */
+    public abstract void echo(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);
+
+    /**
+     * <code>rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void error(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void error2(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void slowPing(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);</code>
+     */
+    public abstract void echo2(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done);
+
+    /**
+     * <code>rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);</code>
+     */
+    public abstract void add(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);
+
+    /**
+     * <code>rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);</code>
+     */
+    public abstract void add2(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);
+
+    /**
+     * <code>rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void testServerGet(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);</code>
+     */
+    public abstract void exchange(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done);
+
+    /**
+     * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void sleep(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void lockAndSleep(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);</code>
+     */
+    public abstract void getAuthMethod(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done);
+
+    /**
+     * <code>rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
+     */
+    public abstract void getAuthUser(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);
+
+    /**
+     * <code>rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
+     */
+    public abstract void echoPostponed(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);
+
+    /**
+     * <code>rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void sendPostponed(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
+     */
+    public abstract void getCurrentUser(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);
+
+    /**
+     * <code>rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
+     */
+    public abstract void getServerRemoteUser(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto>specializeCallback(
+              done));
+          return;
+        case 2:
+          this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 3:
+          this.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 4:
+          this.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 5:
+          this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2>specializeCallback(
+              done));
+          return;
+        case 6:
+          this.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto>specializeCallback(
+              done));
+          return;
+        case 7:
+          this.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto>specializeCallback(
+              done));
+          return;
+        case 8:
+          this.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 9:
+          this.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto>specializeCallback(
+              done));
+          return;
+        case 10:
+          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 11:
+          this.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 12:
+          this.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto>specializeCallback(
+              done));
+          return;
+        case 13:
+          this.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto>specializeCallback(
+              done));
+          return;
+        case 14:
+          this.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto>specializeCallback(
+              done));
+          return;
+        case 15:
+          this.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 16:
+          this.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto>specializeCallback(
+              done));
+          return;
+        case 17:
+          this.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+        case 2:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 3:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 4:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
+        case 5:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
+        case 6:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
+        case 7:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
+        case 8:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 9:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
+        case 10:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+        case 11:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+        case 12:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 13:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 14:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+        case 15:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 16:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 17:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+        case 2:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 3:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 4:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 5:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
+        case 6:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+        case 7:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+        case 8:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 9:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
+        case 10:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 11:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 12:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
+        case 13:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+        case 14:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+        case 15:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 16:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+        case 17:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
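+    // The Stub forwards each call to the supplied RpcChannel with the
+    // matching method descriptor. A hedged usage sketch (obtaining the
+    // channel is environment-specific and assumed here):
+    //   TestProtobufRpcProto.Stub stub = TestProtobufRpcProto.newStub(channel);
+    //   stub.ping(controller, request, done);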
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpcProto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
+      }
+
+      public  void error(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void error2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(3),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void slowPing(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(4),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(5),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance()));
+      }
+
+      public  void add(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(6),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()));
+      }
+
+      public  void add2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(7),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()));
+      }
+
+      public  void testServerGet(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(8),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void exchange(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(9),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance()));
+      }
+
+      public  void sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(10),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void lockAndSleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(11),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void getAuthMethod(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(12),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance()));
+      }
+
+      public  void getAuthUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(13),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
+      }
+
+      public  void echoPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(14),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
+      }
+
+      public  void sendPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(15),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void getCurrentUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(16),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
+      }
+
+      public  void getServerRemoteUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(17),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto slowPing(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto testServerGet(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto exchange(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto lockAndSleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getAuthMethod(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getAuthUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echoPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sendPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getCurrentUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getServerRemoteUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(3),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto slowPing(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(4),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(5),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(6),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(7),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto testServerGet(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(8),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto exchange(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(9),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(10),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto lockAndSleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(11),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getAuthMethod(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(12),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getAuthUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(13),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echoPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(14),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sendPostponed(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(15),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getCurrentUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(16),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getServerRemoteUser(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(17),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcProto)
+  }
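
  That is the full generated surface of the legacy TestProtobufRpcProto service: an async Interface, reflective service factories, a channel-backed Stub, and the BlockingInterface/BlockingStub pair above. As a hedged sketch of the client path, a caller would obtain a protobuf-2.5-compatible channel from its RPC engine and wrap it with the generated factory; `obtainLegacyChannel` below is a hypothetical placeholder, not part of this patch:

      // Sketch only: obtainLegacyChannel() is a hypothetical stand-in for an
      // engine-provided com.google.protobuf.BlockingRpcChannel.
      com.google.protobuf.BlockingRpcChannel channel = obtainLegacyChannel();
      TestRpcServiceProtosLegacy.TestProtobufRpcProto.BlockingInterface stub =
          TestRpcServiceProtosLegacy.TestProtobufRpcProto.newBlockingStub(channel);
      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto reply =
          stub.ping(/* controller */ null,
              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto
                  .getDefaultInstance());
      // A null controller is tolerable only when neither the channel nor the
      // server-side implementation consults it; real engines normally supply one.
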
+
+  /**
+   * Protobuf service {@code hadoop.common.TestProtobufRpc2Proto}
+   */
+  public static abstract class TestProtobufRpc2Proto
+      implements com.google.protobuf.Service {
+    protected TestProtobufRpc2Proto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void ping2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
+       */
+      public abstract void echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);
+
+      /**
+       * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);</code>
+       */
+      public abstract void sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new TestProtobufRpc2Proto() {
+        @java.lang.Override
+        public  void ping2(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.ping2(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echo2(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
+          impl.echo2(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void sleep(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done) {
+          impl.sleep(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 1:
+              return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
+            case 2:
+              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void ping2(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
+     */
+    public abstract void echo2(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);
+
+    /**
+     * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);</code>
+     */
+    public abstract void sleep(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(1);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto>specializeCallback(
+              done));
+          return;
+        case 2:
+          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+        case 2:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+        case 2:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpc2Proto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void ping2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
+      }
+
+      public  void sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo2(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpc2Proto)
+  }
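
  TestProtobufRpc2Proto carries the non-blocking variant of the same pattern: `newStub` wraps a `com.google.protobuf.RpcChannel`, and each generated method delivers its response through an `RpcCallback`. A minimal sketch follows; `rpcChannel` and `controller` are assumed to be supplied by the hosting RPC engine, and by the usual protobuf convention the callback receives null (with the failure recorded on the controller) when the call fails:

      // Sketch only: rpcChannel and controller are engine-supplied placeholders.
      TestRpcServiceProtosLegacy.TestProtobufRpc2Proto.Stub stub =
          TestRpcServiceProtosLegacy.TestProtobufRpc2Proto.newStub(rpcChannel);
      stub.ping2(controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto
              .getDefaultInstance(),
          new com.google.protobuf.RpcCallback<
              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>() {
            @Override
            public void run(
                org.apache.hadoop.ipc.protobuf.TestProtosLegacy
                    .EmptyResponseProto response) {
              // Invoked once the channel completes the call.
            }
          });
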
+
+  /**
+   * Protobuf service {@code hadoop.common.OldProtobufRpcProto}
+   */
+  public static abstract class OldProtobufRpcProto
+      implements com.google.protobuf.Service {
+    protected OldProtobufRpcProto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new OldProtobufRpcProto() {
+        @java.lang.Override
+        public  void ping(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.ping(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echo(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.echo(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 1:
+              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void ping(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void echo(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(2);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.OldProtobufRpcProto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.OldProtobufRpcProto)
+  }
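
  OldProtobufRpcProto is small enough (two methods) to show the whole round trip the generated pieces support. The following self-contained sketch, assuming only protobuf-java 2.5.x and these generated classes on the classpath, dispatches a stub call straight into the reflective blocking service in-process; the class and variable names are invented for illustration:

      import com.google.protobuf.BlockingRpcChannel;
      import com.google.protobuf.BlockingService;
      import com.google.protobuf.Descriptors;
      import com.google.protobuf.Message;
      import com.google.protobuf.RpcController;
      import com.google.protobuf.ServiceException;
      import org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto;
      import org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto;
      import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.OldProtobufRpcProto;

      public class InProcessOldProtoSketch {
        public static void main(String[] args) throws ServiceException {
          // Server half: a trivial implementation of the two-method interface.
          OldProtobufRpcProto.BlockingInterface impl =
              new OldProtobufRpcProto.BlockingInterface() {
                @Override
                public EmptyResponseProto ping(RpcController controller,
                    EmptyRequestProto request) {
                  return EmptyResponseProto.getDefaultInstance();
                }
                @Override
                public EmptyResponseProto echo(RpcController controller,
                    EmptyRequestProto request) {
                  return EmptyResponseProto.getDefaultInstance();
                }
              };
          final BlockingService service =
              OldProtobufRpcProto.newReflectiveBlockingService(impl);

          // "Transport": a channel that dispatches straight into the service,
          // the same call a real RPC engine makes after decoding a request.
          BlockingRpcChannel channel = new BlockingRpcChannel() {
            @Override
            public Message callBlockingMethod(
                Descriptors.MethodDescriptor method, RpcController controller,
                Message request, Message responsePrototype)
                throws ServiceException {
              return service.callBlockingMethod(method, controller, request);
            }
          };

          // Client half: the generated stub routes ping() through the channel.
          OldProtobufRpcProto.BlockingInterface stub =
              OldProtobufRpcProto.newBlockingStub(channel);
          EmptyResponseProto reply =
              stub.ping(null, EmptyRequestProto.getDefaultInstance());
          System.out.println("ping returned: " + reply);
        }
      }

  A real engine replaces the in-process channel with serialization plus a socket; everything in the generated code above stays the same.
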
+
+  /**
+   * Protobuf service {@code hadoop.common.NewProtobufRpcProto}
+   */
+  public static abstract class NewProtobufRpcProto
+      implements com.google.protobuf.Service {
+    protected NewProtobufRpcProto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);</code>
+       */
+      public abstract void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new NewProtobufRpcProto() {
+        @java.lang.Override
+        public  void ping(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.ping(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echo(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done) {
+          impl.echo(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 1:
+              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void ping(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);</code>
+     */
+    public abstract void echo(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(3);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.NewProtobufRpcProto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.NewProtobufRpcProto)
+  }
+
+  /**
+   * Protobuf service {@code hadoop.common.NewerProtobufRpcProto}
+   */
+  public static abstract class NewerProtobufRpcProto
+      implements com.google.protobuf.Service {
+    protected NewerProtobufRpcProto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+      /**
+       * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new NewerProtobufRpcProto() {
+        @java.lang.Override
+        public  void ping(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.ping(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void echo(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.echo(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            case 1:
+              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void ping(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    /**
+     * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void echo(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(4);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.NewerProtobufRpcProto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+
+      public  void echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.NewerProtobufRpcProto)
+  }
+
+  /**
+   * Protobuf service {@code hadoop.common.CustomProto}
+   */
+  public static abstract class CustomProto
+      implements com.google.protobuf.Service {
+    protected CustomProto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+       */
+      public abstract void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new CustomProto() {
+        @java.lang.Override
+        public  void ping(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+          impl.ping(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
+     */
+    public abstract void ping(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(5);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.CustomProto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.CustomProto)
+  }
+
+  /**
+   * Protobuf service {@code hadoop.common.TestProtobufRpcHandoffProto}
+   */
+  public static abstract class TestProtobufRpcHandoffProto
+      implements com.google.protobuf.Service {
+    protected TestProtobufRpcHandoffProto() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code>
+       */
+      public abstract void sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new TestProtobufRpcHandoffProto() {
+        @java.lang.Override
+        public  void sleep(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done) {
+          impl.sleep(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code>
+     */
+    public abstract void sleep(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(6);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpcHandoffProto implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class,
+            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 sleep(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcHandoffProto)
+  }
+
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\035test_rpc_service_legacy.proto\022\rhadoop." +
+      "common\032\021test_legacy.proto2\330\013\n\024TestProtob" +
+      "ufRpcProto\022K\n\004ping\022 .hadoop.common.Empty" +
+      "RequestProto\032!.hadoop.common.EmptyRespon" +
+      "seProto\022I\n\004echo\022\037.hadoop.common.EchoRequ" +
+      "estProto\032 .hadoop.common.EchoResponsePro" +
+      "to\022L\n\005error\022 .hadoop.common.EmptyRequest" +
+      "Proto\032!.hadoop.common.EmptyResponseProto" +
+      "\022M\n\006error2\022 .hadoop.common.EmptyRequestP" +
+      "roto\032!.hadoop.common.EmptyResponseProto\022",
+      "R\n\010slowPing\022#.hadoop.common.SlowPingRequ" +
+      "estProto\032!.hadoop.common.EmptyResponsePr" +
+      "oto\022L\n\005echo2\022 .hadoop.common.EchoRequest" +
+      "Proto2\032!.hadoop.common.EchoResponseProto" +
+      "2\022F\n\003add\022\036.hadoop.common.AddRequestProto" +
+      "\032\037.hadoop.common.AddResponseProto\022H\n\004add" +
+      "2\022\037.hadoop.common.AddRequestProto2\032\037.had" +
+      "oop.common.AddResponseProto\022T\n\rtestServe" +
+      "rGet\022 .hadoop.common.EmptyRequestProto\032!" +
+      ".hadoop.common.EmptyResponseProto\022U\n\010exc",
+      "hange\022#.hadoop.common.ExchangeRequestPro" +
+      "to\032$.hadoop.common.ExchangeResponseProto" +
+      "\022L\n\005sleep\022 .hadoop.common.SleepRequestPr" +
+      "oto\032!.hadoop.common.EmptyResponseProto\022S" +
+      "\n\014lockAndSleep\022 .hadoop.common.SleepRequ" +
+      "estProto\032!.hadoop.common.EmptyResponsePr" +
+      "oto\022Y\n\rgetAuthMethod\022 .hadoop.common.Emp" +
+      "tyRequestProto\032&.hadoop.common.AuthMetho" +
+      "dResponseProto\022Q\n\013getAuthUser\022 .hadoop.c" +
+      "ommon.EmptyRequestProto\032 .hadoop.common.",
+      "UserResponseProto\022R\n\rechoPostponed\022\037.had" +
+      "oop.common.EchoRequestProto\032 .hadoop.com" +
+      "mon.EchoResponseProto\022T\n\rsendPostponed\022 " +
+      ".hadoop.common.EmptyRequestProto\032!.hadoo" +
+      "p.common.EmptyResponseProto\022T\n\016getCurren" +
+      "tUser\022 .hadoop.common.EmptyRequestProto\032" +
+      " .hadoop.common.UserResponseProto\022Y\n\023get" +
+      "ServerRemoteUser\022 .hadoop.common.EmptyRe" +
+      "questProto\032 .hadoop.common.UserResponseP" +
+      "roto2\377\001\n\025TestProtobufRpc2Proto\022L\n\005ping2\022",
+      " .hadoop.common.EmptyRequestProto\032!.hado" +
+      "op.common.EmptyResponseProto\022J\n\005echo2\022\037." +
+      "hadoop.common.EchoRequestProto\032 .hadoop." +
+      "common.EchoResponseProto\022L\n\005sleep\022 .hado" +
+      "op.common.SleepRequestProto\032!.hadoop.com" +
+      "mon.SleepResponseProto2\257\001\n\023OldProtobufRp" +
+      "cProto\022K\n\004ping\022 .hadoop.common.EmptyRequ" +
+      "estProto\032!.hadoop.common.EmptyResponsePr" +
+      "oto\022K\n\004echo\022 .hadoop.common.EmptyRequest" +
+      "Proto\032!.hadoop.common.EmptyResponseProto",
+      "2\253\001\n\023NewProtobufRpcProto\022K\n\004ping\022 .hadoo" +
+      "p.common.EmptyRequestProto\032!.hadoop.comm" +
+      "on.EmptyResponseProto\022G\n\004echo\022\036.hadoop.c" +
+      "ommon.OptRequestProto\032\037.hadoop.common.Op" +
+      "tResponseProto2\261\001\n\025NewerProtobufRpcProto" +
+      "\022K\n\004ping\022 .hadoop.common.EmptyRequestPro" +
+      "to\032!.hadoop.common.EmptyResponseProto\022K\n" +
+      "\004echo\022 .hadoop.common.EmptyRequestProto\032" +
+      "!.hadoop.common.EmptyResponseProto2Z\n\013Cu" +
+      "stomProto\022K\n\004ping\022 .hadoop.common.EmptyR",
+      "equestProto\032!.hadoop.common.EmptyRespons" +
+      "eProto2m\n\033TestProtobufRpcHandoffProto\022N\n" +
+      "\005sleep\022!.hadoop.common.SleepRequestProto" +
+      "2\032\".hadoop.common.SleepResponseProto2BB\n" +
+      "\036org.apache.hadoop.ipc.protobufB\032TestRpc" +
+      "ServiceProtosLegacy\210\001\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.getDescriptor(),
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
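
The service stubs above are generated against the unshaded com.google.protobuf runtime, which is what lets them coexist with the shaded org.apache.hadoop.thirdparty.protobuf classes that ProtobufRpcEngine2 uses. The following is a minimal sketch of driving one of the generated blocking stubs in process. It assumes only protobuf-java and the generated classes above on the classpath; the no-op channel and the class name LegacyStubSketch are illustrative, not part of this patch.

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.ipc.protobuf.TestProtosLegacy;
import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.OldProtobufRpcProto;

public class LegacyStubSketch {
  public static void main(String[] args) throws ServiceException {
    // In-process channel that answers every call with the default response message.
    BlockingRpcChannel channel = new BlockingRpcChannel() {
      @Override
      public Message callBlockingMethod(MethodDescriptor method,
          RpcController controller, Message request, Message responsePrototype)
          throws ServiceException {
        return responsePrototype.getDefaultInstanceForType();
      }
    };
    OldProtobufRpcProto.BlockingInterface stub =
        OldProtobufRpcProto.newBlockingStub(channel);
    // Both calls resolve their MethodDescriptor from the descriptor data
    // embedded in the static initializer above.
    stub.ping(null, TestProtosLegacy.EmptyRequestProto.getDefaultInstance());
    stub.echo(null, TestProtosLegacy.EmptyRequestProto.getDefaultInstance());
  }
}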

+ 145 - 7
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java

@@ -17,12 +17,10 @@
  */
 package org.apache.hadoop.ipc;
 
-import org.apache.hadoop.thirdparty.protobuf.BlockingService;
-import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.ipc.RPC.RpcKind;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos;
@@ -30,38 +28,71 @@ import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto;
+import org.apache.hadoop.ipc.protobuf.TestProtosLegacy;
 import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpc2Proto;
 import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto;
+import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.thirdparty.protobuf.BlockingService;
+import org.apache.hadoop.thirdparty.protobuf.RpcController;
+import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.concurrent.TimeoutException;
 
-import static org.assertj.core.api.Assertions.assertThat;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounterGt;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeFalse;
 
 /**
  * Test for testing protocol buffer based RPC mechanism.
  * This test depends on test.proto definition of types in src/test/proto
  * and protobuf service definition from src/test/test_rpc_service.proto
  */
+@RunWith(Parameterized.class)
 public class TestProtoBufRpc extends TestRpcBase {
   private static RPC.Server server;
   private final static int SLEEP_DURATION = 1000;
 
+  /**
+   * Whether to also exercise the legacy protobuf implementation in the
+   * same server.
+   */
+  private boolean testWithLegacy;
+  /**
+   * Whether the legacy protobuf implementation is registered first when
+   * creating the RPC server.
+   */
+  private boolean testWithLegacyFirst;
+
+  public TestProtoBufRpc(Boolean testWithLegacy, Boolean testWithLegacyFirst) {
+    this.testWithLegacy = testWithLegacy;
+    this.testWithLegacyFirst = testWithLegacyFirst;
+  }
+
   @ProtocolInfo(protocolName = "testProto2", protocolVersion = 1)
   public interface TestRpcService2 extends
       TestProtobufRpc2Proto.BlockingInterface {
   }
 
+  @ProtocolInfo(protocolName = "testProtoLegacy", protocolVersion = 1)
+  public interface TestRpcService2Legacy extends
+      TestRpcServiceProtosLegacy.TestProtobufRpc2Proto.BlockingInterface {
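+    // A distinct protocolName is required: the server dispatches calls by
+    // protocol name, version, and RPC kind, so "testProto2" cannot be reused.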
+  }
+
   public static class PBServer2Impl implements TestRpcService2 {
 
     @Override
@@ -88,12 +119,58 @@ public class TestProtoBufRpc extends TestRpcBase {
     }
   }
 
+  public static class PBServer2ImplLegacy implements TestRpcService2Legacy {
+
+    @Override
+    public TestProtosLegacy.EmptyResponseProto ping2(
+        com.google.protobuf.RpcController unused,
+        TestProtosLegacy.EmptyRequestProto request)
+        throws com.google.protobuf.ServiceException {
+      return TestProtosLegacy.EmptyResponseProto.newBuilder().build();
+    }
+
+    @Override
+    public TestProtosLegacy.EchoResponseProto echo2(
+        com.google.protobuf.RpcController unused,
+        TestProtosLegacy.EchoRequestProto request)
+        throws com.google.protobuf.ServiceException {
+      return TestProtosLegacy.EchoResponseProto.newBuilder()
+          .setMessage(request.getMessage()).build();
+    }
+
+    @Override
+    public TestProtosLegacy.SleepResponseProto sleep(
+        com.google.protobuf.RpcController controller,
+        TestProtosLegacy.SleepRequestProto request)
+        throws com.google.protobuf.ServiceException {
+      try {
+        Thread.sleep(request.getMilliSeconds());
+      } catch (InterruptedException ex) {
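+        // Interruption merely cuts the simulated delay short; this test stub
+        // has nothing to recover, so the exception is deliberately ignored.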
+      }
+      return TestProtosLegacy.SleepResponseProto.newBuilder().build();
+    }
+  }
+
+  @Parameters
+  public static Collection<Object[]> params() {
+    Collection<Object[]> params = new ArrayList<Object[]>();
+    params.add(new Object[] {Boolean.TRUE, Boolean.TRUE });
+    params.add(new Object[] {Boolean.TRUE, Boolean.FALSE });
+    params.add(new Object[] {Boolean.FALSE, Boolean.FALSE });
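+    // The fourth combination (FALSE, TRUE) is intentionally absent:
+    // legacy-first ordering is meaningless when no legacy service is registered.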
+    return params;
+  }
+
   @Before
+  @SuppressWarnings("deprecation")
   public void setUp() throws IOException { // Setup server for both protocols
     conf = new Configuration();
     conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, 1024);
     conf.setBoolean(CommonConfigurationKeys.IPC_SERVER_LOG_SLOW_RPC, true);
     // Set RPC engine to protobuf RPC engine
+    if (testWithLegacy) {
+      RPC.setProtocolEngine(conf, TestRpcService2Legacy.class,
+          ProtobufRpcEngine.class);
+    }
     RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class);
     RPC.setProtocolEngine(conf, TestRpcService2.class,
         ProtobufRpcEngine2.class);
@@ -103,9 +180,21 @@ public class TestProtoBufRpc extends TestRpcBase {
     BlockingService service = TestProtobufRpcProto
         .newReflectiveBlockingService(serverImpl);
 
-    // Get RPC server for server side implementation
-    server = new RPC.Builder(conf).setProtocol(TestRpcService.class)
-        .setInstance(service).setBindAddress(ADDRESS).setPort(PORT).build();
+    if (testWithLegacy && testWithLegacyFirst) {
+      PBServer2ImplLegacy server2ImplLegacy = new PBServer2ImplLegacy();
+      com.google.protobuf.BlockingService legacyService =
+          TestRpcServiceProtosLegacy.TestProtobufRpc2Proto
+              .newReflectiveBlockingService(server2ImplLegacy);
+      server = new RPC.Builder(conf).setProtocol(TestRpcService2Legacy.class)
+          .setInstance(legacyService).setBindAddress(ADDRESS).setPort(PORT)
+          .build();
+      server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService.class,
+          service);
+    } else {
+      // Get RPC server for server side implementation
+      server = new RPC.Builder(conf).setProtocol(TestRpcService.class)
+          .setInstance(service).setBindAddress(ADDRESS).setPort(PORT).build();
+    }
     addr = NetUtils.getConnectAddress(server);
 
     // now the second protocol
@@ -115,6 +204,16 @@ public class TestProtoBufRpc extends TestRpcBase {
 
     server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService2.class,
         service2);
+
+    if (testWithLegacy && !testWithLegacyFirst) {
+      PBServer2ImplLegacy server2ImplLegacy = new PBServer2ImplLegacy();
+      com.google.protobuf.BlockingService legacyService =
+          TestRpcServiceProtosLegacy.TestProtobufRpc2Proto
+              .newReflectiveBlockingService(server2ImplLegacy);
+      server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER,
+          TestRpcService2Legacy.class, legacyService);
+    }
     server.start();
   }
   
@@ -128,6 +227,10 @@ public class TestProtoBufRpc extends TestRpcBase {
     return RPC.getProxy(TestRpcService2.class, 0, addr, conf);
   }
 
+  private TestRpcService2Legacy getClientLegacy() throws IOException {
+    return RPC.getProxy(TestRpcService2Legacy.class, 0, addr, conf);
+  }
+
   @Test (timeout=5000)
   public void testProtoBufRpc() throws Exception {
     TestRpcService client = getClient(addr, conf);
@@ -179,10 +282,39 @@ public class TestProtoBufRpc extends TestRpcBase {
     MetricsRecordBuilder rpcDetailedMetrics = 
         getMetrics(server.getRpcDetailedMetrics().name());
     assertCounterGt("Echo2NumOps", 0L, rpcDetailedMetrics);
+
+    if (testWithLegacy) {
+      testProtobufLegacy();
+    }
+  }
+
+  private void testProtobufLegacy()
+      throws IOException, com.google.protobuf.ServiceException {
+    TestRpcService2Legacy client = getClientLegacy();
+
+    // Test ping method
+    client.ping2(null, TestProtosLegacy.EmptyRequestProto.newBuilder().build());
+
+    // Test echo method
+    TestProtosLegacy.EchoResponseProto echoResponse = client.echo2(null,
+        TestProtosLegacy.EchoRequestProto.newBuilder().setMessage("hello")
+            .build());
+    assertThat(echoResponse.getMessage()).isEqualTo("hello");
+
+    // Ensure RPC metrics are updated
+    MetricsRecordBuilder rpcMetrics = getMetrics(server.getRpcMetrics().name());
+    assertCounterGt("RpcQueueTimeNumOps", 0L, rpcMetrics);
+    assertCounterGt("RpcProcessingTimeNumOps", 0L, rpcMetrics);
+
+    MetricsRecordBuilder rpcDetailedMetrics =
+        getMetrics(server.getRpcDetailedMetrics().name());
+    assertCounterGt("Echo2NumOps", 0L, rpcDetailedMetrics);
   }
 
   @Test (timeout=5000)
   public void testProtoBufRandomException() throws Exception {
+    // Not run with the legacy engine.
+    assumeFalse(testWithLegacy);
     TestRpcService client = getClient(addr, conf);
 
     try {
@@ -200,6 +332,8 @@ public class TestProtoBufRpc extends TestRpcBase {
   
   @Test(timeout=6000)
   public void testExtraLongRpc() throws Exception {
+    // Not run with the legacy engine.
+    assumeFalse(testWithLegacy);
     TestRpcService2 client = getClient2();
     final String shortString = StringUtils.repeat("X", 4);
     // short message goes through
@@ -219,6 +353,8 @@ public class TestProtoBufRpc extends TestRpcBase {
   @Test(timeout = 12000)
   public void testLogSlowRPC() throws IOException, ServiceException,
       TimeoutException, InterruptedException {
+    // Not run with the legacy engine.
+    assumeFalse(testWithLegacy);
     TestRpcService2 client = getClient2();
     // make 10 K fast calls
     for (int x = 0; x < 10000; x++) {
@@ -244,6 +380,8 @@ public class TestProtoBufRpc extends TestRpcBase {
 
   @Test(timeout = 12000)
   public void testEnsureNoLogIfDisabled() throws IOException, ServiceException {
+    // Not run with the legacy engine.
+    assumeFalse(testWithLegacy);
     // disable slow RPC logging
     server.setLogSlowRPC(false);
     TestRpcService2 client = getClient2();

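The parameterization above runs every test three times against a server that may host both protobuf engines at once. As a client-side sketch (not part of this patch; it assumes the imports and the addr/conf fields of TestRpcBase), each protocol gets its own proxy, one per engine:

    // Hedged sketch: one proxy per engine against the same server address.
    private void pingBothEngines() throws Exception {
      TestRpcService2 modern =
          RPC.getProxy(TestRpcService2.class, 0, addr, conf);
      TestRpcService2Legacy legacy =
          RPC.getProxy(TestRpcService2Legacy.class, 0, addr, conf);
      try {
        // New engine: shaded protobuf request/response types.
        modern.ping2(null, TestProtos.EmptyRequestProto.newBuilder().build());
        // Legacy engine: unshaded com.google.protobuf types.
        legacy.ping2(null,
            TestProtosLegacy.EmptyRequestProto.newBuilder().build());
      } finally {
        RPC.stopProxy(modern);
        RPC.stopProxy(legacy);
      }
    }

Neither proxy needs to know which engine serves the other protocol; that is the point of letting the two engines co-exist.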
+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java

@@ -145,7 +145,7 @@ public class TestProtoBufRpcServerHandoff {
         ServiceException {
       final long startTime = System.currentTimeMillis();
       final ProtobufRpcEngineCallback2 callback =
-          ProtobufRpcEngine2.Server.registerForDeferredResponse();
+          ProtobufRpcEngine2.Server.registerForDeferredResponse2();
       final long sleepTime = request.getSleepTime();
       new Thread() {
         @Override

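The one-line change above tracks the rename of the deferred-response hook on the new engine. For context, a minimal sketch of the pattern (assuming ProtobufRpcEngineCallback2 keeps its setResponse/error methods; everything else mirrors the surrounding test):

    // Hedged sketch of a deferred response on ProtobufRpcEngine2.
    public SleepResponseProto2 sleep(RpcController controller,
        SleepRequestProto2 request) {
      // Capture the callback; the reply is withheld until it fires.
      final ProtobufRpcEngineCallback2 callback =
          ProtobufRpcEngine2.Server.registerForDeferredResponse2();
      new Thread(() -> {
        try {
          Thread.sleep(request.getSleepTime());
          callback.setResponse(SleepResponseProto2.newBuilder().build());
        } catch (InterruptedException e) {
          callback.error(e);
        }
      }).start();
      // A null return tells the engine the response was deferred.
      return null;
    }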
+ 101 - 0
hadoop-common-project/hadoop-common/src/test/proto/test_legacy.proto

@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+syntax = "proto2";
+option java_package = "org.apache.hadoop.ipc.protobuf";
+option java_outer_classname = "TestProtosLegacy";
+option java_generate_equals_and_hash = true;
+package hadoop.common;
+
+message EmptyRequestProto {
+}
+
+message EmptyResponseProto {
+}
+
+message EchoRequestProto {
+  required string message = 1;
+}
+
+message EchoResponseProto {
+  required string message = 1;
+}
+
+message OptRequestProto {
+  optional string message = 1;
+}
+
+message OptResponseProto {
+  optional string message = 1;
+}
+
+message SleepRequestProto{
+  required int32 milliSeconds = 1;
+}
+
+message SleepResponseProto{
+}
+
+message SlowPingRequestProto {
+  required bool shouldSlow = 1;
+}
+
+message EchoRequestProto2 {
+  repeated string message = 1;
+}
+
+message EchoResponseProto2 {
+  repeated string message = 1;
+}
+
+message AddRequestProto {
+  required int32 param1 = 1;
+  required int32 param2 = 2;
+}
+
+message AddRequestProto2 {
+  repeated int32 params = 1;
+}
+
+message AddResponseProto {
+  required int32 result = 1;
+}
+
+message ExchangeRequestProto {
+  repeated int32 values = 1;
+}
+
+message ExchangeResponseProto {
+  repeated int32 values = 1;
+}
+
+message AuthMethodResponseProto {
+  required int32 code = 1;
+  required string mechanismName = 2;
+}
+
+message UserResponseProto {
+  required string user = 1;
+}
+
+message SleepRequestProto2 {
+  optional int64 sleep_time = 1;
+}
+
+message SleepResponseProto2 {
+  optional int64 receive_time = 1;
+  optional int64 response_time = 2;
+}

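The messages above are plain proto2, and their generated classes use the unshaded com.google.protobuf runtime, matching the PBServer2ImplLegacy signatures earlier in this patch. A round-trip sketch (illustrative only; parseFrom throws InvalidProtocolBufferException, so run it where that can propagate):

    // Hedged sketch: build, serialize, and re-parse a legacy message.
    TestProtosLegacy.EchoRequestProto req =
        TestProtosLegacy.EchoRequestProto.newBuilder()
            .setMessage("hello").build();
    byte[] wire = req.toByteArray();  // standard protobuf wire format
    TestProtosLegacy.EchoRequestProto parsed =
        TestProtosLegacy.EchoRequestProto.parseFrom(wire);
    assert "hello".equals(parsed.getMessage());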
+ 79 - 0
hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service_legacy.proto

@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+syntax = "proto2";
+option java_package = "org.apache.hadoop.ipc.protobuf";
+option java_outer_classname = "TestRpcServiceProtosLegacy";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+package hadoop.common;
+
+import "test_legacy.proto";
+
+/**
+ * A protobuf service for use in tests
+ */
+service TestProtobufRpcProto {
+  rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc echo(EchoRequestProto) returns (EchoResponseProto);
+  rpc error(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc error2(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc slowPing(SlowPingRequestProto) returns (EmptyResponseProto);
+  rpc echo2(EchoRequestProto2) returns (EchoResponseProto2);
+  rpc add(AddRequestProto) returns (AddResponseProto);
+  rpc add2(AddRequestProto2) returns (AddResponseProto);
+  rpc testServerGet(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc exchange(ExchangeRequestProto) returns (ExchangeResponseProto);
+  rpc sleep(SleepRequestProto) returns (EmptyResponseProto);
+  rpc lockAndSleep(SleepRequestProto) returns (EmptyResponseProto);
+  rpc getAuthMethod(EmptyRequestProto) returns (AuthMethodResponseProto);
+  rpc getAuthUser(EmptyRequestProto) returns (UserResponseProto);
+  rpc echoPostponed(EchoRequestProto) returns (EchoResponseProto);
+  rpc sendPostponed(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc getCurrentUser(EmptyRequestProto) returns (UserResponseProto);
+  rpc getServerRemoteUser(EmptyRequestProto) returns (UserResponseProto);
+}
+
+service TestProtobufRpc2Proto {
+  rpc ping2(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc echo2(EchoRequestProto) returns (EchoResponseProto);
+  rpc sleep(SleepRequestProto) returns (SleepResponseProto);
+}
+
+service OldProtobufRpcProto {
+  rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc echo(EmptyRequestProto) returns (EmptyResponseProto);
+}
+
+service NewProtobufRpcProto {
+  rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc echo(OptRequestProto) returns (OptResponseProto);
+}
+
+service NewerProtobufRpcProto {
+  rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc echo(EmptyRequestProto) returns (EmptyResponseProto);
+}
+
+service CustomProto {
+  rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+}
+
+service TestProtobufRpcHandoffProto {
+  rpc sleep(SleepRequestProto2) returns (SleepResponseProto2);
+}
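
With java_generic_services = true, protoc emits a BlockingInterface and a newReflectiveBlockingService factory for each service above, which is exactly how setUp registers TestProtobufRpc2Proto. A condensed server-side sketch (assumes the conf, ADDRESS, and PORT of the test, inside a method that throws IOException):

    // Hedged sketch: expose the legacy service from an RPC.Server.
    PBServer2ImplLegacy impl = new PBServer2ImplLegacy();
    com.google.protobuf.BlockingService svc =
        TestRpcServiceProtosLegacy.TestProtobufRpc2Proto
            .newReflectiveBlockingService(impl);
    RPC.Server server = new RPC.Builder(conf)
        .setProtocol(TestRpcService2Legacy.class)
        .setInstance(svc)
        .setBindAddress(ADDRESS)
        .setPort(PORT)
        .build();
    server.start();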