@@ -0,0 +1,9892 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// This class is added to the source tree because a protoc 2.5.0 executable
+// for ARM is not available to regenerate the same code.
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: test_legacy.proto
+
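+// For orientation, a sketch of the test_legacy.proto definitions this file
+// appears to have been generated from, reconstructed from the generated
+// messages below (not copied from the original .proto; any messages beyond
+// this excerpt are omitted):
+//
+//   package hadoop.common;
+//   option java_package = "org.apache.hadoop.ipc.protobuf";
+//   option java_outer_classname = "TestProtosLegacy";
+//
+//   message EmptyRequestProto  { }
+//   message EmptyResponseProto { }
+//   message EchoRequestProto   { required string message = 1; }
+//   message EchoResponseProto  { required string message = 1; }
+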
+package org.apache.hadoop.ipc.protobuf;
+
+public final class TestProtosLegacy {
+  private TestProtosLegacy() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface EmptyRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EmptyRequestProto}
+   */
+  public static final class EmptyRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EmptyRequestProtoOrBuilder {
+    // Use EmptyRequestProto.newBuilder() to construct.
+    private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EmptyRequestProto defaultInstance;
+    public static EmptyRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EmptyRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EmptyRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EmptyRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<EmptyRequestProto>() {
+      public EmptyRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EmptyRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EmptyRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EmptyRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
+    }
+
+    static {
+      defaultInstance = new EmptyRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
+  }
+
+  public interface EmptyResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EmptyResponseProto}
+   */
+  public static final class EmptyResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EmptyResponseProtoOrBuilder {
+    // Use EmptyResponseProto.newBuilder() to construct.
+    private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EmptyResponseProto defaultInstance;
+    public static EmptyResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EmptyResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EmptyResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EmptyResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<EmptyResponseProto>() {
+      public EmptyResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EmptyResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EmptyResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EmptyResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EmptyResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
+    }
+
+    static {
+      defaultInstance = new EmptyResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
+  }
+
+  public interface EchoRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string message = 1;
+    /**
+     * <code>required string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>required string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>required string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.EchoRequestProto}
+   */
+  public static final class EchoRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements EchoRequestProtoOrBuilder {
+    // Use EchoRequestProto.newBuilder() to construct.
+    private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final EchoRequestProto defaultInstance;
+    public static EchoRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public EchoRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private EchoRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<EchoRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<EchoRequestProto>() {
+      public EchoRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new EchoRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<EchoRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasMessage()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EchoRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasMessage()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto)
+    }
+
+    static {
+      defaultInstance = new EchoRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto)
+  }
+
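+  // Illustrative use of the generated API (editor's note, not generated code):
+  // a build/serialize/parse round trip using only methods defined in this file
+  // plus the standard protobuf-java 2.5.0 Message surface.
+  //
+  //   EchoRequestProto request =
+  //       EchoRequestProto.newBuilder().setMessage("hello").build();
+  //   byte[] wire = request.toByteArray();
+  //   EchoRequestProto parsed = EchoRequestProto.parseFrom(wire);
+  //   assert parsed.getMessage().equals("hello");
+  //
+  // Because "message" is a required field, build() throws
+  // UninitializedMessageException if setMessage(...) was never called.
+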
|
|
|
+ public interface EchoResponseProtoOrBuilder
|
|
|
+ extends com.google.protobuf.MessageOrBuilder {
|
|
|
+
|
|
|
+ // required string message = 1;
|
|
|
+ /**
|
|
|
+ * <code>required string message = 1;</code>
|
|
|
+ */
|
|
|
+ boolean hasMessage();
|
|
|
+ /**
|
|
|
+ * <code>required string message = 1;</code>
|
|
|
+ */
|
|
|
+ java.lang.String getMessage();
|
|
|
+ /**
|
|
|
+ * <code>required string message = 1;</code>
|
|
|
+ */
|
|
|
+ com.google.protobuf.ByteString
|
|
|
+ getMessageBytes();
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.EchoResponseProto}
|
|
|
+ */
|
|
|
+ public static final class EchoResponseProto extends
|
|
|
+ com.google.protobuf.GeneratedMessage
|
|
|
+ implements EchoResponseProtoOrBuilder {
|
|
|
+ // Use EchoResponseProto.newBuilder() to construct.
|
|
|
+ private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
|
|
|
+ super(builder);
|
|
|
+ this.unknownFields = builder.getUnknownFields();
|
|
|
+ }
|
|
|
+ private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
|
|
|
+
|
|
|
+ private static final EchoResponseProto defaultInstance;
|
|
|
+ public static EchoResponseProto getDefaultInstance() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ public EchoResponseProto getDefaultInstanceForType() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
|
|
|
+ @java.lang.Override
|
|
|
+ public final com.google.protobuf.UnknownFieldSet
|
|
|
+ getUnknownFields() {
|
|
|
+ return this.unknownFields;
|
|
|
+ }
|
|
|
+ private EchoResponseProto(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ initFields();
|
|
|
+ int mutable_bitField0_ = 0;
|
|
|
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
|
|
+ com.google.protobuf.UnknownFieldSet.newBuilder();
|
|
|
+ try {
|
|
|
+ boolean done = false;
|
|
|
+ while (!done) {
|
|
|
+ int tag = input.readTag();
|
|
|
+ switch (tag) {
|
|
|
+ case 0:
|
|
|
+ done = true;
|
|
|
+ break;
|
|
|
+ default: {
|
|
|
+ if (!parseUnknownField(input, unknownFields,
|
|
|
+ extensionRegistry, tag)) {
|
|
|
+ done = true;
|
|
|
+ }
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 10: {
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ message_ = input.readBytes();
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ throw e.setUnfinishedMessage(this);
|
|
|
+ } catch (java.io.IOException e) {
|
|
|
+ throw new com.google.protobuf.InvalidProtocolBufferException(
|
|
|
+ e.getMessage()).setUnfinishedMessage(this);
|
|
|
+ } finally {
|
|
|
+ this.unknownFields = unknownFields.build();
|
|
|
+ makeExtensionsImmutable();
|
|
|
+ }
|
|
|
+ }
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ public static com.google.protobuf.Parser<EchoResponseProto> PARSER =
|
|
|
+ new com.google.protobuf.AbstractParser<EchoResponseProto>() {
|
|
|
+ public EchoResponseProto parsePartialFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return new EchoResponseProto(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ };
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public com.google.protobuf.Parser<EchoResponseProto> getParserForType() {
|
|
|
+ return PARSER;
|
|
|
+ }
|
|
|
+
|
|
|
+ private int bitField0_;
|
|
|
+ // required string message = 1;
|
|
|
+ public static final int MESSAGE_FIELD_NUMBER = 1;
|
|
|
+ private java.lang.Object message_;
|
|
|
+ /**
|
|
|
+ * <code>required string message = 1;</code>
|
|
|
+ */
|
|
|
+ public boolean hasMessage() {
|
|
|
+ return ((bitField0_ & 0x00000001) == 0x00000001);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required string message = 1;</code>
|
|
|
+ */
|
|
|
+ public java.lang.String getMessage() {
|
|
|
+ java.lang.Object ref = message_;
|
|
|
+ if (ref instanceof java.lang.String) {
|
|
|
+ return (java.lang.String) ref;
|
|
|
+ } else {
|
|
|
+ com.google.protobuf.ByteString bs =
|
|
|
+ (com.google.protobuf.ByteString) ref;
|
|
|
+ java.lang.String s = bs.toStringUtf8();
|
|
|
+ if (bs.isValidUtf8()) {
|
|
|
+ message_ = s;
|
|
|
+ }
|
|
|
+ return s;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required string message = 1;</code>
|
|
|
+ */
|
|
|
+ public com.google.protobuf.ByteString
|
|
|
+ getMessageBytes() {
|
|
|
+ java.lang.Object ref = message_;
|
|
|
+ if (ref instanceof java.lang.String) {
|
|
|
+ com.google.protobuf.ByteString b =
|
|
|
+ com.google.protobuf.ByteString.copyFromUtf8(
|
|
|
+ (java.lang.String) ref);
|
|
|
+ message_ = b;
|
|
|
+ return b;
|
|
|
+ } else {
|
|
|
+ return (com.google.protobuf.ByteString) ref;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ private void initFields() {
|
|
|
+ message_ = "";
|
|
|
+ }
|
|
|
+ private byte memoizedIsInitialized = -1;
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ byte isInitialized = memoizedIsInitialized;
|
|
|
+ if (isInitialized != -1) return isInitialized == 1;
|
|
|
+
|
|
|
+ if (!hasMessage()) {
|
|
|
+ memoizedIsInitialized = 0;
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+ memoizedIsInitialized = 1;
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
|
|
|
+ throws java.io.IOException {
|
|
|
+ getSerializedSize();
|
|
|
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ output.writeBytes(1, getMessageBytes());
|
|
|
+ }
|
|
|
+ getUnknownFields().writeTo(output);
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedSerializedSize = -1;
|
|
|
+ public int getSerializedSize() {
|
|
|
+ int size = memoizedSerializedSize;
|
|
|
+ if (size != -1) return size;
|
|
|
+
|
|
|
+ size = 0;
|
|
|
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ size += com.google.protobuf.CodedOutputStream
|
|
|
+ .computeBytesSize(1, getMessageBytes());
|
|
|
+ }
|
|
|
+ size += getUnknownFields().getSerializedSize();
|
|
|
+ memoizedSerializedSize = size;
|
|
|
+ return size;
|
|
|
+ }
|
|
|
+
|
|
|
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
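+    // Editor's note: hashCode() above memoizes its result, which is sound
+    // only because a built message is immutable; 0 doubles as the "not yet
+    // computed" sentinel, so a message whose hash genuinely equals 0 would
+    // simply be rehashed on every call.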
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
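+    // Editor's note: the ten parseFrom/parseDelimitedFrom overloads above
+    // all funnel into the static PARSER; the ExtensionRegistryLite variants
+    // only matter for messages that declare extensions, and these test
+    // protos declare none.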
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.EchoResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
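+      // Editor's note: buildPartial() above copies the builder's has-bits
+      // into the message without validating required fields; build() is the
+      // variant that enforces them via newUninitializedMessageException.
+      // With one field the bit translation is trivial (0x00000001 <->
+      // hasMessage()), but the same from/to pattern scales to 32 fields per
+      // bitField0_ word in larger messages.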
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasMessage()) {
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
+    }
+
+    static {
+      defaultInstance = new EchoResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
+  }
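+  // Editor's note (hypothetical usage sketch, relying only on the generated
+  // API above):
+  //
+  //   EchoResponseProto resp = EchoResponseProto.newBuilder()
+  //       .setMessage("hello")
+  //       .build();   // build() enforces the required field
+  //   byte[] wire = resp.toByteArray();
+  //   EchoResponseProto back = EchoResponseProto.parseFrom(wire);
+  //   assert back.equals(resp) && back.hashCode() == resp.hashCode();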
+
+  public interface OptRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional string message = 1;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.OptRequestProto}
+   */
+  public static final class OptRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements OptRequestProtoOrBuilder {
+    // Use OptRequestProto.newBuilder() to construct.
+    private OptRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private OptRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final OptRequestProto defaultInstance;
+    public static OptRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public OptRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OptRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
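+    // Editor's note: in the parsing constructor above, protoc emits the
+    // default label before case 10; that ordering is legal Java, since
+    // switch dispatch is by value rather than by textual label order.
+    // Tag 10 is 0x0A, i.e. field 1 with wire type 2 (the message field).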
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OptRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<OptRequestProto>() {
+      public OptRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OptRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OptRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.OptRequestProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptRequestProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.OptRequestProto)
+    }
+
+    static {
+      defaultInstance = new OptRequestProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.OptRequestProto)
+  }
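+  // Editor's note (hypothetical usage): unlike EchoResponseProto's required
+  // field, OptRequestProto.message is optional, so an empty message is valid:
+  //
+  //   OptRequestProto req = OptRequestProto.newBuilder().build(); // legal
+  //   boolean set = req.hasMessage(); // false
+  //   String s = req.getMessage();    // "" (the proto2 string default)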
+
+  public interface OptResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional string message = 1;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    boolean hasMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    java.lang.String getMessage();
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getMessageBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.OptResponseProto}
+   */
+  public static final class OptResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements OptResponseProtoOrBuilder {
+    // Use OptResponseProto.newBuilder() to construct.
+    private OptResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private OptResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final OptResponseProto defaultInstance;
+    public static OptResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public OptResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OptResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              message_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OptResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<OptResponseProto>() {
+      public OptResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OptResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OptResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional string message = 1;
+    public static final int MESSAGE_FIELD_NUMBER = 1;
+    private java.lang.Object message_;
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public boolean hasMessage() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public java.lang.String getMessage() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          message_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string message = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMessageBytes() {
+      java.lang.Object ref = message_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        message_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      message_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMessageBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMessageBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasMessage() == other.hasMessage());
+      if (hasMessage()) {
+        result = result && getMessage()
+            .equals(other.getMessage());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMessage()) {
+        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+        hash = (53 * hash) + getMessage().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.OptResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        message_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_OptResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.message_ = message_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance()) return this;
+        if (other.hasMessage()) {
+          bitField0_ |= 0x00000001;
+          message_ = other.message_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional string message = 1;
+      private java.lang.Object message_ = "";
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public boolean hasMessage() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public java.lang.String getMessage() {
+        java.lang.Object ref = message_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          message_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMessageBytes() {
+        java.lang.Object ref = message_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          message_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessage(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder clearMessage() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        message_ = getDefaultInstance().getMessage();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string message = 1;</code>
+       */
+      public Builder setMessageBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        message_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.OptResponseProto)
+    }
+
+    static {
+      defaultInstance = new OptResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.OptResponseProto)
+  }
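+  // Editor's note (hypothetical usage): because built messages are
+  // immutable, toBuilder() is the idiomatic copy-and-modify path:
+  //
+  //   OptResponseProto updated = original.toBuilder()
+  //       .setMessage("revised")
+  //       .build();   // 'original' is untouched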
+
+  public interface SleepRequestProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 milliSeconds = 1;
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    boolean hasMilliSeconds();
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    int getMilliSeconds();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SleepRequestProto}
+   */
+  public static final class SleepRequestProto extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepRequestProtoOrBuilder {
+    // Use SleepRequestProto.newBuilder() to construct.
+    private SleepRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepRequestProto defaultInstance;
+    public static SleepRequestProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepRequestProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepRequestProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              milliSeconds_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepRequestProto> PARSER =
+        new com.google.protobuf.AbstractParser<SleepRequestProto>() {
+      public SleepRequestProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepRequestProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepRequestProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 milliSeconds = 1;
+    public static final int MILLISECONDS_FIELD_NUMBER = 1;
+    private int milliSeconds_;
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    public boolean hasMilliSeconds() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 milliSeconds = 1;</code>
+     */
+    public int getMilliSeconds() {
+      return milliSeconds_;
+    }
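+    // Editor's note: milliSeconds is a plain int32, so unlike the string
+    // fields above there is no cached alternate representation; the value is
+    // varint-encoded (see computeInt32Size below), e.g. 300 costs two
+    // payload bytes (0xAC 0x02) after the one-byte tag 0x08.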
+
+    private void initFields() {
+      milliSeconds_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasMilliSeconds()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, milliSeconds_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, milliSeconds_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto) obj;
+
+      boolean result = true;
+      result = result && (hasMilliSeconds() == other.hasMilliSeconds());
+      if (hasMilliSeconds()) {
+        result = result && (getMilliSeconds()
+            == other.getMilliSeconds());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasMilliSeconds()) {
+        hash = (37 * hash) + MILLISECONDS_FIELD_NUMBER;
+        hash = (53 * hash) + getMilliSeconds();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+ public static Builder newBuilder() { return Builder.create(); }
|
|
|
+ public Builder newBuilderForType() { return newBuilder(); }
|
|
|
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto prototype) {
|
|
|
+ return newBuilder().mergeFrom(prototype);
|
|
|
+ }
|
|
|
+ public Builder toBuilder() { return newBuilder(this); }
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ protected Builder newBuilderForType(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ Builder builder = new Builder(parent);
|
|
|
+ return builder;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.SleepRequestProto}
|
|
|
+ */
|
|
|
+ public static final class Builder extends
|
|
|
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
|
|
|
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProtoOrBuilder {
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.newBuilder()
|
|
|
+ private Builder() {
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+
|
|
|
+ private Builder(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ super(parent);
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+ private void maybeForceBuilderInitialization() {
|
|
|
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
|
|
|
+ }
|
|
|
+ }
|
|
|
+ private static Builder create() {
|
|
|
+ return new Builder();
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clear() {
|
|
|
+ super.clear();
|
|
|
+ milliSeconds_ = 0;
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clone() {
|
|
|
+ return create().mergeFrom(buildPartial());
|
|
|
+ }
|
|
|
+
|
|
|
+ public com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptorForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto getDefaultInstanceForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto build() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto result = buildPartial();
|
|
|
+ if (!result.isInitialized()) {
|
|
|
+ throw newUninitializedMessageException(result);
|
|
|
+ }
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto buildPartial() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto(this);
|
|
|
+ int from_bitField0_ = bitField0_;
|
|
|
+ int to_bitField0_ = 0;
|
|
|
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ to_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ result.milliSeconds_ = milliSeconds_;
|
|
|
+ result.bitField0_ = to_bitField0_;
|
|
|
+ onBuilt();
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(com.google.protobuf.Message other) {
|
|
|
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto) {
|
|
|
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)other);
|
|
|
+ } else {
|
|
|
+ super.mergeFrom(other);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto other) {
|
|
|
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance()) return this;
|
|
|
+ if (other.hasMilliSeconds()) {
|
|
|
+ setMilliSeconds(other.getMilliSeconds());
|
|
|
+ }
|
|
|
+ this.mergeUnknownFields(other.getUnknownFields());
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ if (!hasMilliSeconds()) {
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto parsedMessage = null;
|
|
|
+ try {
|
|
|
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto) e.getUnfinishedMessage();
|
|
|
+ throw e;
|
|
|
+ } finally {
|
|
|
+ if (parsedMessage != null) {
|
|
|
+ mergeFrom(parsedMessage);
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ private int bitField0_;
|
|
|
+
|
|
|
+ // required int32 milliSeconds = 1;
|
|
|
+ private int milliSeconds_ ;
|
|
|
+ /**
|
|
|
+ * <code>required int32 milliSeconds = 1;</code>
|
|
|
+ */
|
|
|
+ public boolean hasMilliSeconds() {
|
|
|
+ return ((bitField0_ & 0x00000001) == 0x00000001);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 milliSeconds = 1;</code>
|
|
|
+ */
|
|
|
+ public int getMilliSeconds() {
|
|
|
+ return milliSeconds_;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 milliSeconds = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder setMilliSeconds(int value) {
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ milliSeconds_ = value;
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 milliSeconds = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder clearMilliSeconds() {
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ milliSeconds_ = 0;
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto)
|
|
|
+ }
|
|
|
+
|
|
|
+ static {
|
|
|
+ defaultInstance = new SleepRequestProto(true);
|
|
|
+ defaultInstance.initFields();
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto)
|
|
|
+ }
|
|
|
+
|
|
|
+ public interface SleepResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hadoop.common.SleepResponseProto}
+ */
+ public static final class SleepResponseProto extends
+ com.google.protobuf.GeneratedMessage
+ implements SleepResponseProtoOrBuilder {
+ // Use SleepResponseProto.newBuilder() to construct.
+ private SleepResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SleepResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SleepResponseProto defaultInstance;
+ public static SleepResponseProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SleepResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SleepResponseProto(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SleepResponseProto> PARSER =
+ new com.google.protobuf.AbstractParser<SleepResponseProto>() {
+ public SleepResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SleepResponseProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SleepResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hadoop.common.SleepResponseProto}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto)
+ }
+
+ static {
+ defaultInstance = new SleepResponseProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto)
+ }
+
+ public interface SlowPingRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bool shouldSlow = 1;
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ boolean hasShouldSlow();
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ boolean getShouldSlow();
+ }
+ /**
+ * Protobuf type {@code hadoop.common.SlowPingRequestProto}
+ */
+ public static final class SlowPingRequestProto extends
+ com.google.protobuf.GeneratedMessage
+ implements SlowPingRequestProtoOrBuilder {
+ // Use SlowPingRequestProto.newBuilder() to construct.
+ private SlowPingRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SlowPingRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SlowPingRequestProto defaultInstance;
+ public static SlowPingRequestProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SlowPingRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SlowPingRequestProto(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ shouldSlow_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SlowPingRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<SlowPingRequestProto>() {
+ public SlowPingRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SlowPingRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SlowPingRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bool shouldSlow = 1;
+ public static final int SHOULDSLOW_FIELD_NUMBER = 1;
+ private boolean shouldSlow_;
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ public boolean hasShouldSlow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ public boolean getShouldSlow() {
+ return shouldSlow_;
+ }
+
+ private void initFields() {
+ shouldSlow_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasShouldSlow()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, shouldSlow_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, shouldSlow_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto) obj;
+
+ boolean result = true;
+ result = result && (hasShouldSlow() == other.hasShouldSlow());
+ if (hasShouldSlow()) {
+ result = result && (getShouldSlow()
+ == other.getShouldSlow());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasShouldSlow()) {
+ hash = (37 * hash) + SHOULDSLOW_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getShouldSlow());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hadoop.common.SlowPingRequestProto}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ shouldSlow_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.shouldSlow_ = shouldSlow_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance()) return this;
+ if (other.hasShouldSlow()) {
+ setShouldSlow(other.getShouldSlow());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasShouldSlow()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required bool shouldSlow = 1;
+ private boolean shouldSlow_ ;
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ public boolean hasShouldSlow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ public boolean getShouldSlow() {
+ return shouldSlow_;
+ }
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ public Builder setShouldSlow(boolean value) {
+ bitField0_ |= 0x00000001;
+ shouldSlow_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required bool shouldSlow = 1;</code>
+ */
+ public Builder clearShouldSlow() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ shouldSlow_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hadoop.common.SlowPingRequestProto)
+ }
+
+ static {
+ defaultInstance = new SlowPingRequestProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.SlowPingRequestProto)
+ }
+
+ public interface EchoRequestProto2OrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated string message = 1;
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ java.util.List<java.lang.String>
+ getMessageList();
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ int getMessageCount();
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ java.lang.String getMessage(int index);
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getMessageBytes(int index);
+ }
+ /**
+ * Protobuf type {@code hadoop.common.EchoRequestProto2}
+ */
+ public static final class EchoRequestProto2 extends
+ com.google.protobuf.GeneratedMessage
+ implements EchoRequestProto2OrBuilder {
+ // Use EchoRequestProto2.newBuilder() to construct.
+ private EchoRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private EchoRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final EchoRequestProto2 defaultInstance;
+ public static EchoRequestProto2 getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public EchoRequestProto2 getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private EchoRequestProto2(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.LazyStringArrayList();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ message_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.UnmodifiableLazyStringList(message_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<EchoRequestProto2> PARSER =
+ new com.google.protobuf.AbstractParser<EchoRequestProto2>() {
+ public EchoRequestProto2 parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new EchoRequestProto2(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<EchoRequestProto2> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated string message = 1;
+ public static final int MESSAGE_FIELD_NUMBER = 1;
+ private com.google.protobuf.LazyStringList message_;
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.util.List<java.lang.String>
+ getMessageList() {
+ return message_;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public int getMessageCount() {
+ return message_.size();
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.lang.String getMessage(int index) {
+ return message_.get(index);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getMessageBytes(int index) {
+ return message_.getByteString(index);
+ }
+
+ private void initFields() {
+ message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < message_.size(); i++) {
+ output.writeBytes(1, message_.getByteString(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < message_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(message_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getMessageList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2) obj;
+
+ boolean result = true;
+ result = result && getMessageList()
+ .equals(other.getMessageList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getMessageCount() > 0) {
+ hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+ hash = (53 * hash) + getMessageList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hadoop.common.EchoRequestProto2}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2OrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoRequestProto2_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.UnmodifiableLazyStringList(
+ message_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.message_ = message_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance()) return this;
+ if (!other.message_.isEmpty()) {
+ if (message_.isEmpty()) {
+ message_ = other.message_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureMessageIsMutable();
+ message_.addAll(other.message_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated string message = 1;
+ private com.google.protobuf.LazyStringList message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureMessageIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.LazyStringArrayList(message_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.util.List<java.lang.String>
+ getMessageList() {
+ return java.util.Collections.unmodifiableList(message_);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public int getMessageCount() {
+ return message_.size();
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.lang.String getMessage(int index) {
+ return message_.get(index);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getMessageBytes(int index) {
+ return message_.getByteString(index);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder setMessage(
+ int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageIsMutable();
+ message_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder addMessage(
+ java.lang.String value) {
+ if (value == null) {
|
|
|
+ throw new NullPointerException();
|
|
|
+ }
|
|
|
+ ensureMessageIsMutable();
|
|
|
+ message_.add(value);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated string message = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder addAllMessage(
|
|
|
+ java.lang.Iterable<java.lang.String> values) {
|
|
|
+ ensureMessageIsMutable();
|
|
|
+ super.addAll(values, message_);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated string message = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder clearMessage() {
|
|
|
+ message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated string message = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder addMessageBytes(
|
|
|
+ com.google.protobuf.ByteString value) {
|
|
|
+ if (value == null) {
|
|
|
+ throw new NullPointerException();
|
|
|
+ }
|
|
|
+ ensureMessageIsMutable();
|
|
|
+ message_.add(value);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto2)
|
|
|
+ }
|
|
|
+
|
|
|
+ static {
|
|
|
+ defaultInstance = new EchoRequestProto2(true);
|
|
|
+ defaultInstance.initFields();
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto2)
|
|
|
+ }
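+
+ // A minimal usage sketch for the generated builder API above. Illustrative
+ // only and not part of the protoc 2.5.0 output; it assumes nothing beyond
+ // the generated methods in this file.
+ //
+ //   TestProtosLegacy.EchoRequestProto2 req =
+ //       TestProtosLegacy.EchoRequestProto2.newBuilder()
+ //           .addMessage("hello")
+ //           .addMessage("world")
+ //           .build();
+ //   byte[] wire = req.toByteArray();
+ //   TestProtosLegacy.EchoRequestProto2 parsed =
+ //       TestProtosLegacy.EchoRequestProto2.parseFrom(wire);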
+
+ public interface EchoResponseProto2OrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated string message = 1;
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ java.util.List<java.lang.String>
+ getMessageList();
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ int getMessageCount();
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ java.lang.String getMessage(int index);
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getMessageBytes(int index);
+ }
+ /**
+ * Protobuf type {@code hadoop.common.EchoResponseProto2}
+ */
+ public static final class EchoResponseProto2 extends
+ com.google.protobuf.GeneratedMessage
+ implements EchoResponseProto2OrBuilder {
+ // Use EchoResponseProto2.newBuilder() to construct.
+ private EchoResponseProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private EchoResponseProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final EchoResponseProto2 defaultInstance;
+ public static EchoResponseProto2 getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public EchoResponseProto2 getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private EchoResponseProto2(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.LazyStringArrayList();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ message_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.UnmodifiableLazyStringList(message_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<EchoResponseProto2> PARSER =
+ new com.google.protobuf.AbstractParser<EchoResponseProto2>() {
+ public EchoResponseProto2 parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new EchoResponseProto2(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<EchoResponseProto2> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated string message = 1;
+ public static final int MESSAGE_FIELD_NUMBER = 1;
+ private com.google.protobuf.LazyStringList message_;
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.util.List<java.lang.String>
+ getMessageList() {
+ return message_;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public int getMessageCount() {
+ return message_.size();
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.lang.String getMessage(int index) {
+ return message_.get(index);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getMessageBytes(int index) {
+ return message_.getByteString(index);
+ }
+
+ private void initFields() {
+ message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < message_.size(); i++) {
+ output.writeBytes(1, message_.getByteString(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < message_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(message_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getMessageList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) obj;
+
+ boolean result = true;
+ result = result && getMessageList()
+ .equals(other.getMessageList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getMessageCount() > 0) {
+ hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+ hash = (53 * hash) + getMessageList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hadoop.common.EchoResponseProto2}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2OrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto2_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.UnmodifiableLazyStringList(
+ message_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.message_ = message_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance()) return this;
+ if (!other.message_.isEmpty()) {
+ if (message_.isEmpty()) {
+ message_ = other.message_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureMessageIsMutable();
+ message_.addAll(other.message_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated string message = 1;
+ private com.google.protobuf.LazyStringList message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureMessageIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ message_ = new com.google.protobuf.LazyStringArrayList(message_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.util.List<java.lang.String>
+ getMessageList() {
+ return java.util.Collections.unmodifiableList(message_);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public int getMessageCount() {
+ return message_.size();
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public java.lang.String getMessage(int index) {
+ return message_.get(index);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getMessageBytes(int index) {
+ return message_.getByteString(index);
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder setMessage(
+ int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageIsMutable();
+ message_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder addMessage(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageIsMutable();
+ message_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder addAllMessage(
+ java.lang.Iterable<java.lang.String> values) {
+ ensureMessageIsMutable();
+ super.addAll(values, message_);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder clearMessage() {
+ message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string message = 1;</code>
+ */
+ public Builder addMessageBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageIsMutable();
+ message_.add(value);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto2)
+ }
+
+ static {
+ defaultInstance = new EchoResponseProto2(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto2)
+ }
+
+ public interface AddRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required int32 param1 = 1;
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ boolean hasParam1();
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ int getParam1();
+
+ // required int32 param2 = 2;
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ boolean hasParam2();
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ int getParam2();
+ }
+ /**
+ * Protobuf type {@code hadoop.common.AddRequestProto}
+ */
+ public static final class AddRequestProto extends
+ com.google.protobuf.GeneratedMessage
+ implements AddRequestProtoOrBuilder {
+ // Use AddRequestProto.newBuilder() to construct.
+ private AddRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AddRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AddRequestProto defaultInstance;
+ public static AddRequestProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AddRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AddRequestProto(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ param1_ = input.readInt32();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ param2_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<AddRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<AddRequestProto>() {
+ public AddRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AddRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<AddRequestProto> getParserForType() {
+ return PARSER;
+ }
+
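+ // Presence tracking: bit 0 of bitField0_ records whether param1 was set and
+ // bit 1 whether param2 was set; isInitialized() below reports false when
+ // either required field is missing.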
+ private int bitField0_;
+ // required int32 param1 = 1;
+ public static final int PARAM1_FIELD_NUMBER = 1;
+ private int param1_;
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ public boolean hasParam1() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ public int getParam1() {
+ return param1_;
+ }
+
+ // required int32 param2 = 2;
+ public static final int PARAM2_FIELD_NUMBER = 2;
+ private int param2_;
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ public boolean hasParam2() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ public int getParam2() {
+ return param2_;
+ }
+
+ private void initFields() {
+ param1_ = 0;
+ param2_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasParam1()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasParam2()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt32(1, param1_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeInt32(2, param2_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(1, param1_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(2, param2_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto) obj;
+
+ boolean result = true;
+ result = result && (hasParam1() == other.hasParam1());
+ if (hasParam1()) {
+ result = result && (getParam1()
+ == other.getParam1());
+ }
+ result = result && (hasParam2() == other.hasParam2());
+ if (hasParam2()) {
+ result = result && (getParam2()
+ == other.getParam2());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasParam1()) {
+ hash = (37 * hash) + PARAM1_FIELD_NUMBER;
+ hash = (53 * hash) + getParam1();
+ }
+ if (hasParam2()) {
+ hash = (37 * hash) + PARAM2_FIELD_NUMBER;
+ hash = (53 * hash) + getParam2();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hadoop.common.AddRequestProto}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ param1_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ param2_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.param1_ = param1_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.param2_ = param2_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance()) return this;
+ if (other.hasParam1()) {
+ setParam1(other.getParam1());
+ }
+ if (other.hasParam2()) {
+ setParam2(other.getParam2());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasParam1()) {
+
+ return false;
+ }
+ if (!hasParam2()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required int32 param1 = 1;
+ private int param1_ ;
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ public boolean hasParam1() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ public int getParam1() {
+ return param1_;
+ }
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ public Builder setParam1(int value) {
+ bitField0_ |= 0x00000001;
+ param1_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required int32 param1 = 1;</code>
+ */
+ public Builder clearParam1() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ param1_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // required int32 param2 = 2;
+ private int param2_ ;
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ public boolean hasParam2() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ public int getParam2() {
+ return param2_;
+ }
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ public Builder setParam2(int value) {
+ bitField0_ |= 0x00000002;
+ param2_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required int32 param2 = 2;</code>
+ */
+ public Builder clearParam2() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ param2_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hadoop.common.AddRequestProto)
+ }
+
+ static {
+ defaultInstance = new AddRequestProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.AddRequestProto)
+ }
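+
+ // Illustrative sketch of the required-field contract above (not part of the
+ // generated output): build() throws an unchecked UninitializedMessageException
+ // unless both required int32 fields have been set.
+ //
+ //   TestProtosLegacy.AddRequestProto add =
+ //       TestProtosLegacy.AddRequestProto.newBuilder()
+ //           .setParam1(2)
+ //           .setParam2(3)
+ //           .build();
+ //   // add.getParam1() + add.getParam2() == 5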
+
+ public interface AddRequestProto2OrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated int32 params = 1;
+ /**
+ * <code>repeated int32 params = 1;</code>
+ */
+ java.util.List<java.lang.Integer> getParamsList();
+ /**
+ * <code>repeated int32 params = 1;</code>
+ */
+ int getParamsCount();
+ /**
+ * <code>repeated int32 params = 1;</code>
+ */
+ int getParams(int index);
+ }
+ /**
+ * Protobuf type {@code hadoop.common.AddRequestProto2}
+ */
+ public static final class AddRequestProto2 extends
+ com.google.protobuf.GeneratedMessage
+ implements AddRequestProto2OrBuilder {
+ // Use AddRequestProto2.newBuilder() to construct.
+ private AddRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AddRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AddRequestProto2 defaultInstance;
+ public static AddRequestProto2 getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AddRequestProto2 getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AddRequestProto2(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ params_ = new java.util.ArrayList<java.lang.Integer>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ params_.add(input.readInt32());
+ break;
+ }
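+ // Tag 8 above decodes one unpacked varint (field 1, wire type 0); tag 10
+ // below decodes the same repeated field in packed form: one length-delimited
+ // block (wire type 2) containing consecutive varints.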
+ case 10: {
+ int length = input.readRawVarint32();
+ int limit = input.pushLimit(length);
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
+ params_ = new java.util.ArrayList<java.lang.Integer>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ while (input.getBytesUntilLimit() > 0) {
+ params_.add(input.readInt32());
+ }
+ input.popLimit(limit);
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ params_ = java.util.Collections.unmodifiableList(params_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<AddRequestProto2> PARSER =
+ new com.google.protobuf.AbstractParser<AddRequestProto2>() {
+ public AddRequestProto2 parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AddRequestProto2(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<AddRequestProto2> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated int32 params = 1;
+ public static final int PARAMS_FIELD_NUMBER = 1;
+ private java.util.List<java.lang.Integer> params_;
+ /**
+ * <code>repeated int32 params = 1;</code>
+ */
+ public java.util.List<java.lang.Integer>
+ getParamsList() {
+ return params_;
+ }
+ /**
+ * <code>repeated int32 params = 1;</code>
+ */
+ public int getParamsCount() {
+ return params_.size();
+ }
+ /**
+ * <code>repeated int32 params = 1;</code>
+ */
+ public int getParams(int index) {
+ return params_.get(index);
+ }
+
+ private void initFields() {
+ params_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < params_.size(); i++) {
+ output.writeInt32(1, params_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < params_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeInt32SizeNoTag(params_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getParamsList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2) obj;
+
+ boolean result = true;
+ result = result && getParamsList()
+ .equals(other.getParamsList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getParamsCount() > 0) {
+ hash = (37 * hash) + PARAMS_FIELD_NUMBER;
+ hash = (53 * hash) + getParamsList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hadoop.common.AddRequestProto2}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2OrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ params_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddRequestProto2_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ params_ = java.util.Collections.unmodifiableList(params_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.params_ = params_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 other) {
|
|
|
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance()) return this;
|
|
|
+ if (!other.params_.isEmpty()) {
|
|
|
+ if (params_.isEmpty()) {
|
|
|
+ params_ = other.params_;
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ } else {
|
|
|
+ ensureParamsIsMutable();
|
|
|
+ params_.addAll(other.params_);
|
|
|
+ }
|
|
|
+ onChanged();
|
|
|
+ }
|
|
|
+ this.mergeUnknownFields(other.getUnknownFields());
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 parsedMessage = null;
|
|
|
+ try {
|
|
|
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2) e.getUnfinishedMessage();
|
|
|
+ throw e;
|
|
|
+ } finally {
|
|
|
+ if (parsedMessage != null) {
|
|
|
+ mergeFrom(parsedMessage);
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ private int bitField0_;
|
|
|
+
|
|
|
+ // repeated int32 params = 1;
|
|
|
+ private java.util.List<java.lang.Integer> params_ = java.util.Collections.emptyList();
|
|
|
+ private void ensureParamsIsMutable() {
|
|
|
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ params_ = new java.util.ArrayList<java.lang.Integer>(params_);
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public java.util.List<java.lang.Integer>
|
|
|
+ getParamsList() {
|
|
|
+ return java.util.Collections.unmodifiableList(params_);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public int getParamsCount() {
|
|
|
+ return params_.size();
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public int getParams(int index) {
|
|
|
+ return params_.get(index);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder setParams(
|
|
|
+ int index, int value) {
|
|
|
+ ensureParamsIsMutable();
|
|
|
+ params_.set(index, value);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder addParams(int value) {
|
|
|
+ ensureParamsIsMutable();
|
|
|
+ params_.add(value);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder addAllParams(
|
|
|
+ java.lang.Iterable<? extends java.lang.Integer> values) {
|
|
|
+ ensureParamsIsMutable();
|
|
|
+ super.addAll(values, params_);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 params = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder clearParams() {
|
|
|
+ params_ = java.util.Collections.emptyList();
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(builder_scope:hadoop.common.AddRequestProto2)
|
|
|
+ }
|
|
|
+
|
|
|
+ static {
|
|
|
+ defaultInstance = new AddRequestProto2(true);
|
|
|
+ defaultInstance.initFields();
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(class_scope:hadoop.common.AddRequestProto2)
|
|
|
+ }
|
|
|
+
|
|
|
+ public interface AddResponseProtoOrBuilder
|
|
|
+ extends com.google.protobuf.MessageOrBuilder {
|
|
|
+
|
|
|
+ // required int32 result = 1;
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ boolean hasResult();
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ int getResult();
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.AddResponseProto}
|
|
|
+ */
|
|
|
+ public static final class AddResponseProto extends
|
|
|
+ com.google.protobuf.GeneratedMessage
|
|
|
+ implements AddResponseProtoOrBuilder {
|
|
|
+ // Use AddResponseProto.newBuilder() to construct.
|
|
|
+ private AddResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
|
|
|
+ super(builder);
|
|
|
+ this.unknownFields = builder.getUnknownFields();
|
|
|
+ }
|
|
|
+ private AddResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
|
|
|
+
|
|
|
+ private static final AddResponseProto defaultInstance;
|
|
|
+ public static AddResponseProto getDefaultInstance() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ public AddResponseProto getDefaultInstanceForType() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
|
|
|
+ @java.lang.Override
|
|
|
+ public final com.google.protobuf.UnknownFieldSet
|
|
|
+ getUnknownFields() {
|
|
|
+ return this.unknownFields;
|
|
|
+ }
|
|
|
+ private AddResponseProto(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ initFields();
|
|
|
+ int mutable_bitField0_ = 0;
|
|
|
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
|
|
+ com.google.protobuf.UnknownFieldSet.newBuilder();
|
|
|
+ try {
|
|
|
+ boolean done = false;
|
|
|
+ while (!done) {
|
|
|
+ int tag = input.readTag();
|
|
|
+ switch (tag) {
|
|
|
+ case 0:
|
|
|
+ done = true;
|
|
|
+ break;
|
|
|
+ default: {
|
|
|
+ if (!parseUnknownField(input, unknownFields,
|
|
|
+ extensionRegistry, tag)) {
|
|
|
+ done = true;
|
|
|
+ }
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 8: {
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ result_ = input.readInt32();
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ throw e.setUnfinishedMessage(this);
|
|
|
+ } catch (java.io.IOException e) {
|
|
|
+ throw new com.google.protobuf.InvalidProtocolBufferException(
|
|
|
+ e.getMessage()).setUnfinishedMessage(this);
|
|
|
+ } finally {
|
|
|
+ this.unknownFields = unknownFields.build();
|
|
|
+ makeExtensionsImmutable();
|
|
|
+ }
|
|
|
+ }
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ public static com.google.protobuf.Parser<AddResponseProto> PARSER =
|
|
|
+ new com.google.protobuf.AbstractParser<AddResponseProto>() {
|
|
|
+ public AddResponseProto parsePartialFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return new AddResponseProto(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ };
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public com.google.protobuf.Parser<AddResponseProto> getParserForType() {
|
|
|
+ return PARSER;
|
|
|
+ }
|
|
|
+
|
|
|
+ private int bitField0_;
|
|
|
+ // required int32 result = 1;
|
|
|
+ public static final int RESULT_FIELD_NUMBER = 1;
|
|
|
+ private int result_;
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ public boolean hasResult() {
|
|
|
+ return ((bitField0_ & 0x00000001) == 0x00000001);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ public int getResult() {
|
|
|
+ return result_;
|
|
|
+ }
|
|
|
+
|
|
|
+ private void initFields() {
|
|
|
+ result_ = 0;
|
|
|
+ }
|
|
|
+ private byte memoizedIsInitialized = -1;
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ byte isInitialized = memoizedIsInitialized;
|
|
|
+ if (isInitialized != -1) return isInitialized == 1;
|
|
|
+
|
|
|
+ if (!hasResult()) {
|
|
|
+ memoizedIsInitialized = 0;
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+ memoizedIsInitialized = 1;
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
|
|
|
+ throws java.io.IOException {
|
|
|
+ getSerializedSize();
|
|
|
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ output.writeInt32(1, result_);
|
|
|
+ }
|
|
|
+ getUnknownFields().writeTo(output);
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedSerializedSize = -1;
|
|
|
+ public int getSerializedSize() {
|
|
|
+ int size = memoizedSerializedSize;
|
|
|
+ if (size != -1) return size;
|
|
|
+
|
|
|
+ size = 0;
|
|
|
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ size += com.google.protobuf.CodedOutputStream
|
|
|
+ .computeInt32Size(1, result_);
|
|
|
+ }
|
|
|
+ size += getUnknownFields().getSerializedSize();
|
|
|
+ memoizedSerializedSize = size;
|
|
|
+ return size;
|
|
|
+ }
|
|
|
+
|
|
|
+ private static final long serialVersionUID = 0L;
|
|
|
+ @java.lang.Override
|
|
|
+ protected java.lang.Object writeReplace()
|
|
|
+ throws java.io.ObjectStreamException {
|
|
|
+ return super.writeReplace();
|
|
|
+ }
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public boolean equals(final java.lang.Object obj) {
|
|
|
+ if (obj == this) {
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto)) {
|
|
|
+ return super.equals(obj);
|
|
|
+ }
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) obj;
|
|
|
+
|
|
|
+ boolean result = true;
|
|
|
+ result = result && (hasResult() == other.hasResult());
|
|
|
+ if (hasResult()) {
|
|
|
+ result = result && (getResult()
|
|
|
+ == other.getResult());
|
|
|
+ }
|
|
|
+ result = result &&
|
|
|
+ getUnknownFields().equals(other.getUnknownFields());
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedHashCode = 0;
|
|
|
+ @java.lang.Override
|
|
|
+ public int hashCode() {
|
|
|
+ if (memoizedHashCode != 0) {
|
|
|
+ return memoizedHashCode;
|
|
|
+ }
|
|
|
+ int hash = 41;
|
|
|
+ hash = (19 * hash) + getDescriptorForType().hashCode();
|
|
|
+ if (hasResult()) {
|
|
|
+ hash = (37 * hash) + RESULT_FIELD_NUMBER;
|
|
|
+ hash = (53 * hash) + getResult();
|
|
|
+ }
|
|
|
+ hash = (29 * hash) + getUnknownFields().hashCode();
|
|
|
+ memoizedHashCode = hash;
|
|
|
+ return hash;
|
|
|
+ }
|
|
|
+
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
|
|
|
+ com.google.protobuf.ByteString data)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
|
|
|
+ com.google.protobuf.ByteString data,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(byte[] data)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
|
|
|
+ byte[] data,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(java.io.InputStream input)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
|
|
|
+ java.io.InputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseDelimitedFrom(java.io.InputStream input)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseDelimitedFrom(input);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseDelimitedFrom(
|
|
|
+ java.io.InputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parseFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input, extensionRegistry);
|
|
|
+ }
|
|
|
+
|
|
|
+ public static Builder newBuilder() { return Builder.create(); }
|
|
|
+ public Builder newBuilderForType() { return newBuilder(); }
|
|
|
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto prototype) {
|
|
|
+ return newBuilder().mergeFrom(prototype);
|
|
|
+ }
|
|
|
+ public Builder toBuilder() { return newBuilder(this); }
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ protected Builder newBuilderForType(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ Builder builder = new Builder(parent);
|
|
|
+ return builder;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.AddResponseProto}
|
|
|
+ */
|
|
|
+ public static final class Builder extends
|
|
|
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
|
|
|
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProtoOrBuilder {
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.newBuilder()
|
|
|
+ private Builder() {
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+
|
|
|
+ private Builder(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ super(parent);
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+ private void maybeForceBuilderInitialization() {
|
|
|
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
|
|
|
+ }
|
|
|
+ }
|
|
|
+ private static Builder create() {
|
|
|
+ return new Builder();
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clear() {
|
|
|
+ super.clear();
|
|
|
+ result_ = 0;
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clone() {
|
|
|
+ return create().mergeFrom(buildPartial());
|
|
|
+ }
|
|
|
+
|
|
|
+ public com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptorForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AddResponseProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto getDefaultInstanceForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto build() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto result = buildPartial();
|
|
|
+ if (!result.isInitialized()) {
|
|
|
+ throw newUninitializedMessageException(result);
|
|
|
+ }
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto buildPartial() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto(this);
|
|
|
+ int from_bitField0_ = bitField0_;
|
|
|
+ int to_bitField0_ = 0;
|
|
|
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ to_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ result.result_ = result_;
|
|
|
+ result.bitField0_ = to_bitField0_;
|
|
|
+ onBuilt();
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(com.google.protobuf.Message other) {
|
|
|
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) {
|
|
|
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto)other);
|
|
|
+ } else {
|
|
|
+ super.mergeFrom(other);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto other) {
|
|
|
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()) return this;
|
|
|
+ if (other.hasResult()) {
|
|
|
+ setResult(other.getResult());
|
|
|
+ }
|
|
|
+ this.mergeUnknownFields(other.getUnknownFields());
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ if (!hasResult()) {
|
|
|
+
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto parsedMessage = null;
|
|
|
+ try {
|
|
|
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) e.getUnfinishedMessage();
|
|
|
+ throw e;
|
|
|
+ } finally {
|
|
|
+ if (parsedMessage != null) {
|
|
|
+ mergeFrom(parsedMessage);
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ private int bitField0_;
|
|
|
+
|
|
|
+ // required int32 result = 1;
|
|
|
+ private int result_ ;
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ public boolean hasResult() {
|
|
|
+ return ((bitField0_ & 0x00000001) == 0x00000001);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ public int getResult() {
|
|
|
+ return result_;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder setResult(int value) {
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ result_ = value;
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>required int32 result = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder clearResult() {
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ result_ = 0;
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(builder_scope:hadoop.common.AddResponseProto)
|
|
|
+ }
|
|
|
+
|
|
|
+ static {
|
|
|
+ defaultInstance = new AddResponseProto(true);
|
|
|
+ defaultInstance.initFields();
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(class_scope:hadoop.common.AddResponseProto)
|
|
|
+ }
|
|
|
+
|
|
|
+ public interface ExchangeRequestProtoOrBuilder
|
|
|
+ extends com.google.protobuf.MessageOrBuilder {
|
|
|
+
|
|
|
+ // repeated int32 values = 1;
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ java.util.List<java.lang.Integer> getValuesList();
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ int getValuesCount();
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ int getValues(int index);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.ExchangeRequestProto}
|
|
|
+ */
|
|
|
+ public static final class ExchangeRequestProto extends
|
|
|
+ com.google.protobuf.GeneratedMessage
|
|
|
+ implements ExchangeRequestProtoOrBuilder {
|
|
|
+ // Use ExchangeRequestProto.newBuilder() to construct.
|
|
|
+ private ExchangeRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
|
|
|
+ super(builder);
|
|
|
+ this.unknownFields = builder.getUnknownFields();
|
|
|
+ }
|
|
|
+ private ExchangeRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
|
|
|
+
|
|
|
+ private static final ExchangeRequestProto defaultInstance;
|
|
|
+ public static ExchangeRequestProto getDefaultInstance() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ public ExchangeRequestProto getDefaultInstanceForType() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
|
|
|
+ @java.lang.Override
|
|
|
+ public final com.google.protobuf.UnknownFieldSet
|
|
|
+ getUnknownFields() {
|
|
|
+ return this.unknownFields;
|
|
|
+ }
|
|
|
+ private ExchangeRequestProto(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ initFields();
|
|
|
+ int mutable_bitField0_ = 0;
|
|
|
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
|
|
+ com.google.protobuf.UnknownFieldSet.newBuilder();
|
|
|
+ try {
|
|
|
+ boolean done = false;
|
|
|
+ while (!done) {
|
|
|
+ int tag = input.readTag();
|
|
|
+ switch (tag) {
|
|
|
+ case 0:
|
|
|
+ done = true;
|
|
|
+ break;
|
|
|
+ default: {
|
|
|
+ if (!parseUnknownField(input, unknownFields,
|
|
|
+ extensionRegistry, tag)) {
|
|
|
+ done = true;
|
|
|
+ }
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 8: {
|
|
|
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ values_ = new java.util.ArrayList<java.lang.Integer>();
|
|
|
+ mutable_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ values_.add(input.readInt32());
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 10: {
|
|
|
+ int length = input.readRawVarint32();
|
|
|
+ int limit = input.pushLimit(length);
|
|
|
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
|
|
|
+ values_ = new java.util.ArrayList<java.lang.Integer>();
|
|
|
+ mutable_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ while (input.getBytesUntilLimit() > 0) {
|
|
|
+ values_.add(input.readInt32());
|
|
|
+ }
|
|
|
+ input.popLimit(limit);
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ throw e.setUnfinishedMessage(this);
|
|
|
+ } catch (java.io.IOException e) {
|
|
|
+ throw new com.google.protobuf.InvalidProtocolBufferException(
|
|
|
+ e.getMessage()).setUnfinishedMessage(this);
|
|
|
+ } finally {
|
|
|
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ values_ = java.util.Collections.unmodifiableList(values_);
|
|
|
+ }
|
|
|
+ this.unknownFields = unknownFields.build();
|
|
|
+ makeExtensionsImmutable();
|
|
|
+ }
|
|
|
+ }
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ public static com.google.protobuf.Parser<ExchangeRequestProto> PARSER =
|
|
|
+ new com.google.protobuf.AbstractParser<ExchangeRequestProto>() {
|
|
|
+ public ExchangeRequestProto parsePartialFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return new ExchangeRequestProto(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ };
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public com.google.protobuf.Parser<ExchangeRequestProto> getParserForType() {
|
|
|
+ return PARSER;
|
|
|
+ }
|
|
|
+
|
|
|
+ // repeated int32 values = 1;
|
|
|
+ public static final int VALUES_FIELD_NUMBER = 1;
|
|
|
+ private java.util.List<java.lang.Integer> values_;
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public java.util.List<java.lang.Integer>
|
|
|
+ getValuesList() {
|
|
|
+ return values_;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public int getValuesCount() {
|
|
|
+ return values_.size();
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public int getValues(int index) {
|
|
|
+ return values_.get(index);
|
|
|
+ }
|
|
|
+
|
|
|
+ private void initFields() {
|
|
|
+ values_ = java.util.Collections.emptyList();
|
|
|
+ }
|
|
|
+ private byte memoizedIsInitialized = -1;
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ byte isInitialized = memoizedIsInitialized;
|
|
|
+ if (isInitialized != -1) return isInitialized == 1;
|
|
|
+
|
|
|
+ memoizedIsInitialized = 1;
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
|
|
|
+ throws java.io.IOException {
|
|
|
+ getSerializedSize();
|
|
|
+ for (int i = 0; i < values_.size(); i++) {
|
|
|
+ output.writeInt32(1, values_.get(i));
|
|
|
+ }
|
|
|
+ getUnknownFields().writeTo(output);
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedSerializedSize = -1;
|
|
|
+ public int getSerializedSize() {
|
|
|
+ int size = memoizedSerializedSize;
|
|
|
+ if (size != -1) return size;
|
|
|
+
|
|
|
+ size = 0;
|
|
|
+ {
|
|
|
+ int dataSize = 0;
|
|
|
+ for (int i = 0; i < values_.size(); i++) {
|
|
|
+ dataSize += com.google.protobuf.CodedOutputStream
|
|
|
+ .computeInt32SizeNoTag(values_.get(i));
|
|
|
+ }
|
|
|
+ size += dataSize;
|
|
|
+ size += 1 * getValuesList().size();
|
|
|
+ }
|
|
|
+ size += getUnknownFields().getSerializedSize();
|
|
|
+ memoizedSerializedSize = size;
|
|
|
+ return size;
|
|
|
+ }
|
|
|
+
|
|
|
+ private static final long serialVersionUID = 0L;
|
|
|
+ @java.lang.Override
|
|
|
+ protected java.lang.Object writeReplace()
|
|
|
+ throws java.io.ObjectStreamException {
|
|
|
+ return super.writeReplace();
|
|
|
+ }
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public boolean equals(final java.lang.Object obj) {
|
|
|
+ if (obj == this) {
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)) {
|
|
|
+ return super.equals(obj);
|
|
|
+ }
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto) obj;
|
|
|
+
|
|
|
+ boolean result = true;
|
|
|
+ result = result && getValuesList()
|
|
|
+ .equals(other.getValuesList());
|
|
|
+ result = result &&
|
|
|
+ getUnknownFields().equals(other.getUnknownFields());
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedHashCode = 0;
|
|
|
+ @java.lang.Override
|
|
|
+ public int hashCode() {
|
|
|
+ if (memoizedHashCode != 0) {
|
|
|
+ return memoizedHashCode;
|
|
|
+ }
|
|
|
+ int hash = 41;
|
|
|
+ hash = (19 * hash) + getDescriptorForType().hashCode();
|
|
|
+ if (getValuesCount() > 0) {
|
|
|
+ hash = (37 * hash) + VALUES_FIELD_NUMBER;
|
|
|
+ hash = (53 * hash) + getValuesList().hashCode();
|
|
|
+ }
|
|
|
+ hash = (29 * hash) + getUnknownFields().hashCode();
|
|
|
+ memoizedHashCode = hash;
|
|
|
+ return hash;
|
|
|
+ }
|
|
|
+
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
|
|
|
+ com.google.protobuf.ByteString data)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
|
|
|
+ com.google.protobuf.ByteString data,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(byte[] data)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
|
|
|
+ byte[] data,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(java.io.InputStream input)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
|
|
|
+ java.io.InputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseDelimitedFrom(java.io.InputStream input)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseDelimitedFrom(input);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseDelimitedFrom(
|
|
|
+ java.io.InputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parseFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ return PARSER.parseFrom(input, extensionRegistry);
|
|
|
+ }
|
|
|
+
|
|
|
+ public static Builder newBuilder() { return Builder.create(); }
|
|
|
+ public Builder newBuilderForType() { return newBuilder(); }
|
|
|
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto prototype) {
|
|
|
+ return newBuilder().mergeFrom(prototype);
|
|
|
+ }
|
|
|
+ public Builder toBuilder() { return newBuilder(this); }
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ protected Builder newBuilderForType(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ Builder builder = new Builder(parent);
|
|
|
+ return builder;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.ExchangeRequestProto}
|
|
|
+ */
|
|
|
+ public static final class Builder extends
|
|
|
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
|
|
|
+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProtoOrBuilder {
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.newBuilder()
|
|
|
+ private Builder() {
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+
|
|
|
+ private Builder(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ super(parent);
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+ private void maybeForceBuilderInitialization() {
|
|
|
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
|
|
|
+ }
|
|
|
+ }
|
|
|
+ private static Builder create() {
|
|
|
+ return new Builder();
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clear() {
|
|
|
+ super.clear();
|
|
|
+ values_ = java.util.Collections.emptyList();
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clone() {
|
|
|
+ return create().mergeFrom(buildPartial());
|
|
|
+ }
|
|
|
+
|
|
|
+ public com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptorForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto getDefaultInstanceForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto build() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto result = buildPartial();
|
|
|
+ if (!result.isInitialized()) {
|
|
|
+ throw newUninitializedMessageException(result);
|
|
|
+ }
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto buildPartial() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto(this);
|
|
|
+ int from_bitField0_ = bitField0_;
|
|
|
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ values_ = java.util.Collections.unmodifiableList(values_);
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ }
|
|
|
+ result.values_ = values_;
|
|
|
+ onBuilt();
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(com.google.protobuf.Message other) {
|
|
|
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto) {
|
|
|
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)other);
|
|
|
+ } else {
|
|
|
+ super.mergeFrom(other);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto other) {
|
|
|
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance()) return this;
|
|
|
+ if (!other.values_.isEmpty()) {
|
|
|
+ if (values_.isEmpty()) {
|
|
|
+ values_ = other.values_;
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ } else {
|
|
|
+ ensureValuesIsMutable();
|
|
|
+ values_.addAll(other.values_);
|
|
|
+ }
|
|
|
+ onChanged();
|
|
|
+ }
|
|
|
+ this.mergeUnknownFields(other.getUnknownFields());
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto parsedMessage = null;
|
|
|
+ try {
|
|
|
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto) e.getUnfinishedMessage();
|
|
|
+ throw e;
|
|
|
+ } finally {
|
|
|
+ if (parsedMessage != null) {
|
|
|
+ mergeFrom(parsedMessage);
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ private int bitField0_;
|
|
|
+
|
|
|
+ // repeated int32 values = 1;
|
|
|
+ private java.util.List<java.lang.Integer> values_ = java.util.Collections.emptyList();
|
|
|
+ private void ensureValuesIsMutable() {
|
|
|
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ values_ = new java.util.ArrayList<java.lang.Integer>(values_);
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public java.util.List<java.lang.Integer>
|
|
|
+ getValuesList() {
|
|
|
+ return java.util.Collections.unmodifiableList(values_);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public int getValuesCount() {
|
|
|
+ return values_.size();
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public int getValues(int index) {
|
|
|
+ return values_.get(index);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder setValues(
|
|
|
+ int index, int value) {
|
|
|
+ ensureValuesIsMutable();
|
|
|
+ values_.set(index, value);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder addValues(int value) {
|
|
|
+ ensureValuesIsMutable();
|
|
|
+ values_.add(value);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder addAllValues(
|
|
|
+ java.lang.Iterable<? extends java.lang.Integer> values) {
|
|
|
+ ensureValuesIsMutable();
|
|
|
+ super.addAll(values, values_);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder clearValues() {
|
|
|
+ values_ = java.util.Collections.emptyList();
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeRequestProto)
|
|
|
+ }
|
|
|
+
|
|
|
+ static {
|
|
|
+ defaultInstance = new ExchangeRequestProto(true);
|
|
|
+ defaultInstance.initFields();
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(class_scope:hadoop.common.ExchangeRequestProto)
|
|
|
+ }
|
|
|
+
|
|
|
+ public interface ExchangeResponseProtoOrBuilder
|
|
|
+ extends com.google.protobuf.MessageOrBuilder {
|
|
|
+
|
|
|
+ // repeated int32 values = 1;
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ java.util.List<java.lang.Integer> getValuesList();
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ int getValuesCount();
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ int getValues(int index);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * Protobuf type {@code hadoop.common.ExchangeResponseProto}
|
|
|
+ */
|
|
|
+ public static final class ExchangeResponseProto extends
|
|
|
+ com.google.protobuf.GeneratedMessage
|
|
|
+ implements ExchangeResponseProtoOrBuilder {
|
|
|
+ // Use ExchangeResponseProto.newBuilder() to construct.
|
|
|
+ private ExchangeResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
|
|
|
+ super(builder);
|
|
|
+ this.unknownFields = builder.getUnknownFields();
|
|
|
+ }
|
|
|
+ private ExchangeResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
|
|
|
+
|
|
|
+ private static final ExchangeResponseProto defaultInstance;
|
|
|
+ public static ExchangeResponseProto getDefaultInstance() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ public ExchangeResponseProto getDefaultInstanceForType() {
|
|
|
+ return defaultInstance;
|
|
|
+ }
|
|
|
+
|
|
|
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
|
|
|
+ @java.lang.Override
|
|
|
+ public final com.google.protobuf.UnknownFieldSet
|
|
|
+ getUnknownFields() {
|
|
|
+ return this.unknownFields;
|
|
|
+ }
|
|
|
+ private ExchangeResponseProto(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ initFields();
|
|
|
+ int mutable_bitField0_ = 0;
|
|
|
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
|
|
+ com.google.protobuf.UnknownFieldSet.newBuilder();
|
|
|
+ try {
|
|
|
+ boolean done = false;
|
|
|
+ while (!done) {
|
|
|
+ int tag = input.readTag();
|
|
|
+ switch (tag) {
|
|
|
+ case 0:
|
|
|
+ done = true;
|
|
|
+ break;
|
|
|
+ default: {
|
|
|
+ if (!parseUnknownField(input, unknownFields,
|
|
|
+ extensionRegistry, tag)) {
|
|
|
+ done = true;
|
|
|
+ }
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 8: {
|
|
|
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ values_ = new java.util.ArrayList<java.lang.Integer>();
|
|
|
+ mutable_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ values_.add(input.readInt32());
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 10: {
|
|
|
+ int length = input.readRawVarint32();
|
|
|
+ int limit = input.pushLimit(length);
|
|
|
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
|
|
|
+ values_ = new java.util.ArrayList<java.lang.Integer>();
|
|
|
+ mutable_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ while (input.getBytesUntilLimit() > 0) {
|
|
|
+ values_.add(input.readInt32());
|
|
|
+ }
|
|
|
+ input.popLimit(limit);
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ throw e.setUnfinishedMessage(this);
|
|
|
+ } catch (java.io.IOException e) {
|
|
|
+ throw new com.google.protobuf.InvalidProtocolBufferException(
|
|
|
+ e.getMessage()).setUnfinishedMessage(this);
|
|
|
+ } finally {
|
|
|
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ values_ = java.util.Collections.unmodifiableList(values_);
|
|
|
+ }
|
|
|
+ this.unknownFields = unknownFields.build();
|
|
|
+ makeExtensionsImmutable();
|
|
|
+ }
|
|
|
+ }
|
|
|
+ public static final com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptor() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
|
|
+ internalGetFieldAccessorTable() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
|
|
|
+ .ensureFieldAccessorsInitialized(
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.Builder.class);
|
|
|
+ }
|
|
|
+
|
|
|
+ public static com.google.protobuf.Parser<ExchangeResponseProto> PARSER =
|
|
|
+ new com.google.protobuf.AbstractParser<ExchangeResponseProto>() {
|
|
|
+ public ExchangeResponseProto parsePartialFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return new ExchangeResponseProto(input, extensionRegistry);
|
|
|
+ }
|
|
|
+ };
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public com.google.protobuf.Parser<ExchangeResponseProto> getParserForType() {
|
|
|
+ return PARSER;
|
|
|
+ }
|
|
|
+
|
|
|
+ // repeated int32 values = 1;
|
|
|
+ public static final int VALUES_FIELD_NUMBER = 1;
|
|
|
+ private java.util.List<java.lang.Integer> values_;
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public java.util.List<java.lang.Integer>
|
|
|
+ getValuesList() {
|
|
|
+ return values_;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public int getValuesCount() {
|
|
|
+ return values_.size();
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>repeated int32 values = 1;</code>
|
|
|
+ */
|
|
|
+ public int getValues(int index) {
|
|
|
+ return values_.get(index);
|
|
|
+ }
|
|
|
+
|
|
|
+ private void initFields() {
|
|
|
+ values_ = java.util.Collections.emptyList();
|
|
|
+ }
|
|
|
+ private byte memoizedIsInitialized = -1;
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ byte isInitialized = memoizedIsInitialized;
|
|
|
+ if (isInitialized != -1) return isInitialized == 1;
|
|
|
+
|
|
|
+ memoizedIsInitialized = 1;
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
|
|
|
+ throws java.io.IOException {
|
|
|
+ getSerializedSize();
|
|
|
+ for (int i = 0; i < values_.size(); i++) {
|
|
|
+ output.writeInt32(1, values_.get(i));
|
|
|
+ }
|
|
|
+ getUnknownFields().writeTo(output);
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedSerializedSize = -1;
|
|
|
+ public int getSerializedSize() {
|
|
|
+ int size = memoizedSerializedSize;
|
|
|
+ if (size != -1) return size;
|
|
|
+
|
|
|
+ size = 0;
|
|
|
+ {
|
|
|
+ int dataSize = 0;
|
|
|
+ for (int i = 0; i < values_.size(); i++) {
|
|
|
+ dataSize += com.google.protobuf.CodedOutputStream
|
|
|
+ .computeInt32SizeNoTag(values_.get(i));
|
|
|
+ }
|
|
|
+ size += dataSize;
|
|
|
+ size += 1 * getValuesList().size();
|
|
|
+ }
|
|
|
+ size += getUnknownFields().getSerializedSize();
|
|
|
+ memoizedSerializedSize = size;
|
|
|
+ return size;
|
|
|
+ }
|
|
|
+
|
|
|
+ private static final long serialVersionUID = 0L;
|
|
|
+ @java.lang.Override
|
|
|
+ protected java.lang.Object writeReplace()
|
|
|
+ throws java.io.ObjectStreamException {
|
|
|
+ return super.writeReplace();
|
|
|
+ }
|
|
|
+
|
|
|
+ @java.lang.Override
|
|
|
+ public boolean equals(final java.lang.Object obj) {
|
|
|
+ if (obj == this) {
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto)) {
|
|
|
+ return super.equals(obj);
|
|
|
+ }
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) obj;
|
|
|
+
|
|
|
+ boolean result = true;
|
|
|
+ result = result && getValuesList()
|
|
|
+ .equals(other.getValuesList());
|
|
|
+ result = result &&
|
|
|
+ getUnknownFields().equals(other.getUnknownFields());
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ private int memoizedHashCode = 0;
|
|
|
+ @java.lang.Override
|
|
|
+ public int hashCode() {
|
|
|
+ if (memoizedHashCode != 0) {
|
|
|
+ return memoizedHashCode;
|
|
|
+ }
|
|
|
+ int hash = 41;
|
|
|
+ hash = (19 * hash) + getDescriptorForType().hashCode();
|
|
|
+ if (getValuesCount() > 0) {
|
|
|
+ hash = (37 * hash) + VALUES_FIELD_NUMBER;
|
|
|
+ hash = (53 * hash) + getValuesList().hashCode();
|
|
|
+ }
|
|
|
+ hash = (29 * hash) + getUnknownFields().hashCode();
|
|
|
+ memoizedHashCode = hash;
|
|
|
+ return hash;
|
|
|
+ }
|
|
|
+
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
|
|
|
+ com.google.protobuf.ByteString data)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
|
|
|
+ com.google.protobuf.ByteString data,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data, extensionRegistry);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(byte[] data)
|
|
|
+ throws com.google.protobuf.InvalidProtocolBufferException {
|
|
|
+ return PARSER.parseFrom(data);
|
|
|
+ }
|
|
|
+ public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
|
|
|
+ byte[] data,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.ExchangeResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        values_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = java.util.Collections.unmodifiableList(values_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.values_ = values_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance()) return this;
+        if (!other.values_.isEmpty()) {
+          if (values_.isEmpty()) {
+            values_ = other.values_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureValuesIsMutable();
+            values_.addAll(other.values_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated int32 values = 1;
+      private java.util.List<java.lang.Integer> values_ = java.util.Collections.emptyList();
+      private void ensureValuesIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          values_ = new java.util.ArrayList<java.lang.Integer>(values_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public java.util.List<java.lang.Integer>
+          getValuesList() {
+        return java.util.Collections.unmodifiableList(values_);
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public int getValuesCount() {
+        return values_.size();
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public int getValues(int index) {
+        return values_.get(index);
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder setValues(
+          int index, int value) {
+        ensureValuesIsMutable();
+        values_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder addValues(int value) {
+        ensureValuesIsMutable();
+        values_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder addAllValues(
+          java.lang.Iterable<? extends java.lang.Integer> values) {
+        ensureValuesIsMutable();
+        super.addAll(values, values_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 values = 1;</code>
+       */
+      public Builder clearValues() {
+        values_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeResponseProto)
+    }
+
+    static {
+      defaultInstance = new ExchangeResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.ExchangeResponseProto)
+  }
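+
+  // Illustrative usage sketch (not part of the protoc output; the values 1 and 2
+  // are placeholders): populate the repeated field through the Builder, then
+  // round-trip the message through its wire format.
+  //
+  //   ExchangeResponseProto resp = ExchangeResponseProto.newBuilder()
+  //       .addValues(1)
+  //       .addValues(2)
+  //       .build();
+  //   ExchangeResponseProto copy =
+  //       ExchangeResponseProto.parseFrom(resp.toByteArray());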
+
+  public interface AuthMethodResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 code = 1;
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    boolean hasCode();
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    int getCode();
+
+    // required string mechanismName = 2;
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    boolean hasMechanismName();
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    java.lang.String getMechanismName();
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    com.google.protobuf.ByteString
+        getMechanismNameBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.AuthMethodResponseProto}
+   */
+  public static final class AuthMethodResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements AuthMethodResponseProtoOrBuilder {
+    // Use AuthMethodResponseProto.newBuilder() to construct.
+    private AuthMethodResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private AuthMethodResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final AuthMethodResponseProto defaultInstance;
+    public static AuthMethodResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public AuthMethodResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AuthMethodResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              code_ = input.readInt32();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              mechanismName_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AuthMethodResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<AuthMethodResponseProto>() {
+      public AuthMethodResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AuthMethodResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AuthMethodResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 code = 1;
+    public static final int CODE_FIELD_NUMBER = 1;
+    private int code_;
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    public boolean hasCode() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 code = 1;</code>
+     */
+    public int getCode() {
+      return code_;
+    }
+
+    // required string mechanismName = 2;
+    public static final int MECHANISMNAME_FIELD_NUMBER = 2;
+    private java.lang.Object mechanismName_;
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    public boolean hasMechanismName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    public java.lang.String getMechanismName() {
+      java.lang.Object ref = mechanismName_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          mechanismName_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string mechanismName = 2;</code>
+     */
+    public com.google.protobuf.ByteString
+        getMechanismNameBytes() {
+      java.lang.Object ref = mechanismName_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        mechanismName_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      code_ = 0;
+      mechanismName_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasCode()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasMechanismName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, code_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getMechanismNameBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, code_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getMechanismNameBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasCode() == other.hasCode());
+      if (hasCode()) {
+        result = result && (getCode()
+            == other.getCode());
+      }
+      result = result && (hasMechanismName() == other.hasMechanismName());
+      if (hasMechanismName()) {
+        result = result && getMechanismName()
+            .equals(other.getMechanismName());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasCode()) {
+        hash = (37 * hash) + CODE_FIELD_NUMBER;
+        hash = (53 * hash) + getCode();
+      }
+      if (hasMechanismName()) {
+        hash = (37 * hash) + MECHANISMNAME_FIELD_NUMBER;
+        hash = (53 * hash) + getMechanismName().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.AuthMethodResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        code_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        mechanismName_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.code_ = code_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.mechanismName_ = mechanismName_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance()) return this;
+        if (other.hasCode()) {
+          setCode(other.getCode());
+        }
+        if (other.hasMechanismName()) {
+          bitField0_ |= 0x00000002;
+          mechanismName_ = other.mechanismName_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasCode()) {
+
+          return false;
+        }
+        if (!hasMechanismName()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required int32 code = 1;
+      private int code_ ;
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public boolean hasCode() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public int getCode() {
+        return code_;
+      }
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public Builder setCode(int value) {
+        bitField0_ |= 0x00000001;
+        code_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 code = 1;</code>
+       */
+      public Builder clearCode() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        code_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // required string mechanismName = 2;
+      private java.lang.Object mechanismName_ = "";
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public boolean hasMechanismName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public java.lang.String getMechanismName() {
+        java.lang.Object ref = mechanismName_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          mechanismName_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public com.google.protobuf.ByteString
+          getMechanismNameBytes() {
+        java.lang.Object ref = mechanismName_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          mechanismName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public Builder setMechanismName(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        mechanismName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public Builder clearMechanismName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        mechanismName_ = getDefaultInstance().getMechanismName();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string mechanismName = 2;</code>
+       */
+      public Builder setMechanismNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        mechanismName_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.AuthMethodResponseProto)
+    }
+
+    static {
+      defaultInstance = new AuthMethodResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.AuthMethodResponseProto)
+  }
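+
+  // Illustrative usage sketch (the code value and mechanism name are placeholders,
+  // not constants defined in this file). Both fields are required, so build()
+  // throws an UninitializedMessageException if either setter is skipped:
+  //
+  //   AuthMethodResponseProto resp = AuthMethodResponseProto.newBuilder()
+  //       .setCode(80)
+  //       .setMechanismName("DIGEST-MD5")
+  //       .build();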
+
+  public interface UserResponseProtoOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string user = 1;
+    /**
+     * <code>required string user = 1;</code>
+     */
+    boolean hasUser();
+    /**
+     * <code>required string user = 1;</code>
+     */
+    java.lang.String getUser();
+    /**
+     * <code>required string user = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getUserBytes();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.UserResponseProto}
+   */
+  public static final class UserResponseProto extends
+      com.google.protobuf.GeneratedMessage
+      implements UserResponseProtoOrBuilder {
+    // Use UserResponseProto.newBuilder() to construct.
+    private UserResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private UserResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final UserResponseProto defaultInstance;
+    public static UserResponseProto getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public UserResponseProto getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UserResponseProto(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              user_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UserResponseProto> PARSER =
+        new com.google.protobuf.AbstractParser<UserResponseProto>() {
+      public UserResponseProto parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UserResponseProto(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UserResponseProto> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string user = 1;
+    public static final int USER_FIELD_NUMBER = 1;
+    private java.lang.Object user_;
+    /**
+     * <code>required string user = 1;</code>
+     */
+    public boolean hasUser() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string user = 1;</code>
+     */
+    public java.lang.String getUser() {
+      java.lang.Object ref = user_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          user_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string user = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getUserBytes() {
+      java.lang.Object ref = user_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        user_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      user_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasUser()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getUserBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getUserBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) obj;
+
+      boolean result = true;
+      result = result && (hasUser() == other.hasUser());
+      if (hasUser()) {
+        result = result && getUser()
+            .equals(other.getUser());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasUser()) {
+        hash = (37 * hash) + USER_FIELD_NUMBER;
+        hash = (53 * hash) + getUser().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.UserResponseProto}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProtoOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        user_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_UserResponseProto_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.user_ = user_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()) return this;
+        if (other.hasUser()) {
+          bitField0_ |= 0x00000001;
+          user_ = other.user_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasUser()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string user = 1;
+      private java.lang.Object user_ = "";
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public boolean hasUser() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public java.lang.String getUser() {
+        java.lang.Object ref = user_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          user_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getUserBytes() {
+        java.lang.Object ref = user_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          user_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public Builder setUser(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        user_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public Builder clearUser() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        user_ = getDefaultInstance().getUser();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string user = 1;</code>
+       */
+      public Builder setUserBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        user_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.UserResponseProto)
+    }
+
+    static {
+      defaultInstance = new UserResponseProto(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.UserResponseProto)
+  }
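+
+  // Illustrative usage sketch (`out` and `in` stand for caller-supplied java.io
+  // streams and are not defined here): writeDelimitedTo/parseDelimitedFrom frame
+  // each message with a length prefix so several messages can share one stream.
+  //
+  //   UserResponseProto.newBuilder().setUser("alice").build().writeDelimitedTo(out);
+  //   UserResponseProto parsed = UserResponseProto.parseDelimitedFrom(in);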
+
+  public interface SleepRequestProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional int64 sleep_time = 1;
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    boolean hasSleepTime();
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    long getSleepTime();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SleepRequestProto2}
+   */
+  public static final class SleepRequestProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepRequestProto2OrBuilder {
+    // Use SleepRequestProto2.newBuilder() to construct.
+    private SleepRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepRequestProto2 defaultInstance;
+    public static SleepRequestProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepRequestProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepRequestProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              sleepTime_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepRequestProto2> PARSER =
+        new com.google.protobuf.AbstractParser<SleepRequestProto2>() {
+      public SleepRequestProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepRequestProto2(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepRequestProto2> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional int64 sleep_time = 1;
+    public static final int SLEEP_TIME_FIELD_NUMBER = 1;
+    private long sleepTime_;
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    public boolean hasSleepTime() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional int64 sleep_time = 1;</code>
+     */
+    public long getSleepTime() {
+      return sleepTime_;
+    }
+
+    private void initFields() {
+      sleepTime_ = 0L;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt64(1, sleepTime_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(1, sleepTime_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) obj;
+
+      boolean result = true;
+      result = result && (hasSleepTime() == other.hasSleepTime());
+      if (hasSleepTime()) {
+        result = result && (getSleepTime()
+            == other.getSleepTime());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasSleepTime()) {
+        hash = (37 * hash) + SLEEP_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getSleepTime());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SleepRequestProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
|
|
|
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
|
|
+ super(parent);
|
|
|
+ maybeForceBuilderInitialization();
|
|
|
+ }
|
|
|
+ private void maybeForceBuilderInitialization() {
|
|
|
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
|
|
|
+ }
|
|
|
+ }
|
|
|
+ private static Builder create() {
|
|
|
+ return new Builder();
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clear() {
|
|
|
+ super.clear();
|
|
|
+ sleepTime_ = 0L;
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder clone() {
|
|
|
+ return create().mergeFrom(buildPartial());
|
|
|
+ }
|
|
|
+
|
|
|
+ public com.google.protobuf.Descriptors.Descriptor
|
|
|
+ getDescriptorForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 getDefaultInstanceForType() {
|
|
|
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance();
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 build() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 result = buildPartial();
|
|
|
+ if (!result.isInitialized()) {
|
|
|
+ throw newUninitializedMessageException(result);
|
|
|
+ }
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 buildPartial() {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2(this);
|
|
|
+ int from_bitField0_ = bitField0_;
|
|
|
+ int to_bitField0_ = 0;
|
|
|
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
|
|
|
+ to_bitField0_ |= 0x00000001;
|
|
|
+ }
|
|
|
+ result.sleepTime_ = sleepTime_;
|
|
|
+ result.bitField0_ = to_bitField0_;
|
|
|
+ onBuilt();
|
|
|
+ return result;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(com.google.protobuf.Message other) {
|
|
|
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) {
|
|
|
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)other);
|
|
|
+ } else {
|
|
|
+ super.mergeFrom(other);
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 other) {
|
|
|
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance()) return this;
|
|
|
+ if (other.hasSleepTime()) {
|
|
|
+ setSleepTime(other.getSleepTime());
|
|
|
+ }
|
|
|
+ this.mergeUnknownFields(other.getUnknownFields());
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ public final boolean isInitialized() {
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
+ public Builder mergeFrom(
|
|
|
+ com.google.protobuf.CodedInputStream input,
|
|
|
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
|
|
+ throws java.io.IOException {
|
|
|
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parsedMessage = null;
|
|
|
+ try {
|
|
|
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
|
|
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
|
|
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) e.getUnfinishedMessage();
|
|
|
+ throw e;
|
|
|
+ } finally {
|
|
|
+ if (parsedMessage != null) {
|
|
|
+ mergeFrom(parsedMessage);
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ private int bitField0_;
|
|
|
+
|
|
|
+ // optional int64 sleep_time = 1;
|
|
|
+ private long sleepTime_ ;
|
|
|
+ /**
|
|
|
+ * <code>optional int64 sleep_time = 1;</code>
|
|
|
+ */
|
|
|
+ public boolean hasSleepTime() {
|
|
|
+ return ((bitField0_ & 0x00000001) == 0x00000001);
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>optional int64 sleep_time = 1;</code>
|
|
|
+ */
|
|
|
+ public long getSleepTime() {
|
|
|
+ return sleepTime_;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>optional int64 sleep_time = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder setSleepTime(long value) {
|
|
|
+ bitField0_ |= 0x00000001;
|
|
|
+ sleepTime_ = value;
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+ /**
|
|
|
+ * <code>optional int64 sleep_time = 1;</code>
|
|
|
+ */
|
|
|
+ public Builder clearSleepTime() {
|
|
|
+ bitField0_ = (bitField0_ & ~0x00000001);
|
|
|
+ sleepTime_ = 0L;
|
|
|
+ onChanged();
|
|
|
+ return this;
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto2)
|
|
|
+ }
|
|
|
+
|
|
|
+ static {
|
|
|
+ defaultInstance = new SleepRequestProto2(true);
|
|
|
+ defaultInstance.initFields();
|
|
|
+ }
|
|
|
+
|
|
|
+ // @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto2)
|
|
|
+ }
|
|
|
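+
+  // A minimal usage sketch, kept as a comment so the generated file stays
+  // untouched at compile time: round-tripping SleepRequestProto2 through the
+  // builder and the static parse methods above. The 500L value is an
+  // arbitrary illustration, not anything prescribed by the proto.
+  //
+  //   SleepRequestProto2 req = SleepRequestProto2.newBuilder()
+  //       .setSleepTime(500L)   // sets presence bit 0x00000001 and the value
+  //       .build();
+  //   byte[] bytes = req.toByteArray();
+  //   SleepRequestProto2 parsed = SleepRequestProto2.parseFrom(bytes);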
+
+  public interface SleepResponseProto2OrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional int64 receive_time = 1;
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    boolean hasReceiveTime();
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    long getReceiveTime();
+
+    // optional int64 response_time = 2;
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    boolean hasResponseTime();
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    long getResponseTime();
+  }
+  /**
+   * Protobuf type {@code hadoop.common.SleepResponseProto2}
+   */
+  public static final class SleepResponseProto2 extends
+      com.google.protobuf.GeneratedMessage
+      implements SleepResponseProto2OrBuilder {
+    // Use SleepResponseProto2.newBuilder() to construct.
+    private SleepResponseProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SleepResponseProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SleepResponseProto2 defaultInstance;
+    public static SleepResponseProto2 getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SleepResponseProto2 getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SleepResponseProto2(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              receiveTime_ = input.readInt64();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              responseTime_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SleepResponseProto2> PARSER =
+        new com.google.protobuf.AbstractParser<SleepResponseProto2>() {
+      public SleepResponseProto2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SleepResponseProto2(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SleepResponseProto2> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional int64 receive_time = 1;
+    public static final int RECEIVE_TIME_FIELD_NUMBER = 1;
+    private long receiveTime_;
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    public boolean hasReceiveTime() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional int64 receive_time = 1;</code>
+     */
+    public long getReceiveTime() {
+      return receiveTime_;
+    }
+
+    // optional int64 response_time = 2;
+    public static final int RESPONSE_TIME_FIELD_NUMBER = 2;
+    private long responseTime_;
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    public boolean hasResponseTime() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional int64 response_time = 2;</code>
+     */
+    public long getResponseTime() {
+      return responseTime_;
+    }
+
+    private void initFields() {
+      receiveTime_ = 0L;
+      responseTime_ = 0L;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt64(1, receiveTime_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeInt64(2, responseTime_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(1, receiveTime_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(2, responseTime_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) obj;
+
+      boolean result = true;
+      result = result && (hasReceiveTime() == other.hasReceiveTime());
+      if (hasReceiveTime()) {
+        result = result && (getReceiveTime()
+            == other.getReceiveTime());
+      }
+      result = result && (hasResponseTime() == other.hasResponseTime());
+      if (hasResponseTime()) {
+        result = result && (getResponseTime()
+            == other.getResponseTime());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasReceiveTime()) {
+        hash = (37 * hash) + RECEIVE_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getReceiveTime());
+      }
+      if (hasResponseTime()) {
+        hash = (37 * hash) + RESPONSE_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getResponseTime());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hadoop.common.SleepResponseProto2}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2OrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        receiveTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        responseTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 getDefaultInstanceForType() {
+        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 build() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 buildPartial() {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.receiveTime_ = receiveTime_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.responseTime_ = responseTime_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) {
+          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 other) {
+        if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance()) return this;
+        if (other.hasReceiveTime()) {
+          setReceiveTime(other.getReceiveTime());
+        }
+        if (other.hasResponseTime()) {
+          setResponseTime(other.getResponseTime());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional int64 receive_time = 1;
+      private long receiveTime_ ;
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public boolean hasReceiveTime() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public long getReceiveTime() {
+        return receiveTime_;
+      }
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public Builder setReceiveTime(long value) {
+        bitField0_ |= 0x00000001;
+        receiveTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 receive_time = 1;</code>
+       */
+      public Builder clearReceiveTime() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        receiveTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional int64 response_time = 2;
+      private long responseTime_ ;
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public boolean hasResponseTime() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public long getResponseTime() {
+        return responseTime_;
+      }
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public Builder setResponseTime(long value) {
+        bitField0_ |= 0x00000002;
+        responseTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 response_time = 2;</code>
+       */
+      public Builder clearResponseTime() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        responseTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto2)
+    }
+
+    static {
+      defaultInstance = new SleepResponseProto2(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto2)
+  }
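+
+  // A hedged illustration, not emitted by protoc: proto2 presence semantics
+  // on SleepResponseProto2. The field value below is arbitrary.
+  //
+  //   SleepResponseProto2 resp = SleepResponseProto2.newBuilder()
+  //       .setReceiveTime(1L)
+  //       .build();
+  //   resp.hasReceiveTime();   // true  -> bit 0x00000001 set by the builder
+  //   resp.hasResponseTime();  // false -> the field was never set
+  //   resp.getResponseTime();  // 0L    -> the proto2 default from initFields()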
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EmptyRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EmptyResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_OptRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_OptRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_OptResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_OptResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoRequestProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_EchoResponseProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AddRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AddRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AddRequestProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AddResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AddResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_ExchangeRequestProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_UserResponseProto_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_UserResponseProto_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepRequestProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hadoop_common_SleepResponseProto2_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\021test_legacy.proto\022\rhadoop.common\"\023\n\021Em" +
+      "ptyRequestProto\"\024\n\022EmptyResponseProto\"#\n" +
+      "\020EchoRequestProto\022\017\n\007message\030\001 \002(\t\"$\n\021Ec" +
+      "hoResponseProto\022\017\n\007message\030\001 \002(\t\"\"\n\017OptR" +
+      "equestProto\022\017\n\007message\030\001 \001(\t\"#\n\020OptRespo" +
+      "nseProto\022\017\n\007message\030\001 \001(\t\")\n\021SleepReques" +
+      "tProto\022\024\n\014milliSeconds\030\001 \002(\005\"\024\n\022SleepRes" +
+      "ponseProto\"*\n\024SlowPingRequestProto\022\022\n\nsh" +
+      "ouldSlow\030\001 \002(\010\"$\n\021EchoRequestProto2\022\017\n\007m" +
+      "essage\030\001 \003(\t\"%\n\022EchoResponseProto2\022\017\n\007me",
+      "ssage\030\001 \003(\t\"1\n\017AddRequestProto\022\016\n\006param1" +
+      "\030\001 \002(\005\022\016\n\006param2\030\002 \002(\005\"\"\n\020AddRequestProt" +
+      "o2\022\016\n\006params\030\001 \003(\005\"\"\n\020AddResponseProto\022\016" +
+      "\n\006result\030\001 \002(\005\"&\n\024ExchangeRequestProto\022\016" +
+      "\n\006values\030\001 \003(\005\"\'\n\025ExchangeResponseProto\022" +
+      "\016\n\006values\030\001 \003(\005\">\n\027AuthMethodResponsePro" +
+      "to\022\014\n\004code\030\001 \002(\005\022\025\n\rmechanismName\030\002 \002(\t\"" +
+      "!\n\021UserResponseProto\022\014\n\004user\030\001 \002(\t\"(\n\022Sl" +
+      "eepRequestProto2\022\022\n\nsleep_time\030\001 \001(\003\"B\n\023" +
+      "SleepResponseProto2\022\024\n\014receive_time\030\001 \001(",
+      "\003\022\025\n\rresponse_time\030\002 \001(\003B5\n\036org.apache.h" +
+      "adoop.ipc.protobufB\020TestProtosLegacy\240\001\001"
+    };
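+    // The string above is the serialized descriptor of test_legacy.proto; the
+    // assigner below wires each message's Descriptor and reflective field
+    // accessor table from it once the file descriptor is built. A hedged
+    // inspection sketch (illustrative only, not part of the protoc output):
+    //
+    //   for (com.google.protobuf.Descriptors.Descriptor m :
+    //       TestProtosLegacy.getDescriptor().getMessageTypes()) {
+    //     System.out.println(m.getFullName());  // e.g. hadoop.common.SleepRequestProto2
+    //   }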
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_hadoop_common_EmptyRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EmptyRequestProto_descriptor,
+              new java.lang.String[] { });
+          internal_static_hadoop_common_EmptyResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EmptyResponseProto_descriptor,
+              new java.lang.String[] { });
+          internal_static_hadoop_common_EchoRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(2);
+          internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoRequestProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_EchoResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(3);
+          internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoResponseProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_OptRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(4);
+          internal_static_hadoop_common_OptRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_OptRequestProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_OptResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(5);
+          internal_static_hadoop_common_OptResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_OptResponseProto_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_SleepRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(6);
+          internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepRequestProto_descriptor,
+              new java.lang.String[] { "MilliSeconds", });
+          internal_static_hadoop_common_SleepResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(7);
+          internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepResponseProto_descriptor,
+              new java.lang.String[] { });
+          internal_static_hadoop_common_SlowPingRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(8);
+          internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SlowPingRequestProto_descriptor,
+              new java.lang.String[] { "ShouldSlow", });
+          internal_static_hadoop_common_EchoRequestProto2_descriptor =
+            getDescriptor().getMessageTypes().get(9);
+          internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoRequestProto2_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_EchoResponseProto2_descriptor =
+            getDescriptor().getMessageTypes().get(10);
+          internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_EchoResponseProto2_descriptor,
+              new java.lang.String[] { "Message", });
+          internal_static_hadoop_common_AddRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(11);
+          internal_static_hadoop_common_AddRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AddRequestProto_descriptor,
+              new java.lang.String[] { "Param1", "Param2", });
+          internal_static_hadoop_common_AddRequestProto2_descriptor =
+            getDescriptor().getMessageTypes().get(12);
+          internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AddRequestProto2_descriptor,
+              new java.lang.String[] { "Params", });
+          internal_static_hadoop_common_AddResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(13);
+          internal_static_hadoop_common_AddResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AddResponseProto_descriptor,
+              new java.lang.String[] { "Result", });
+          internal_static_hadoop_common_ExchangeRequestProto_descriptor =
+            getDescriptor().getMessageTypes().get(14);
+          internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_ExchangeRequestProto_descriptor,
+              new java.lang.String[] { "Values", });
+          internal_static_hadoop_common_ExchangeResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(15);
+          internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_ExchangeResponseProto_descriptor,
+              new java.lang.String[] { "Values", });
+          internal_static_hadoop_common_AuthMethodResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(16);
+          internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_AuthMethodResponseProto_descriptor,
+              new java.lang.String[] { "Code", "MechanismName", });
+          internal_static_hadoop_common_UserResponseProto_descriptor =
+            getDescriptor().getMessageTypes().get(17);
+          internal_static_hadoop_common_UserResponseProto_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_UserResponseProto_descriptor,
+              new java.lang.String[] { "User", });
+          internal_static_hadoop_common_SleepRequestProto2_descriptor =
+            getDescriptor().getMessageTypes().get(18);
+          internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepRequestProto2_descriptor,
+              new java.lang.String[] { "SleepTime", });
+          internal_static_hadoop_common_SleepResponseProto2_descriptor =
+            getDescriptor().getMessageTypes().get(19);
+          internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hadoop_common_SleepResponseProto2_descriptor,
+              new java.lang.String[] { "ReceiveTime", "ResponseTime", });
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}