// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: test.proto
package org.apache.hadoop.ipc.protobuf;
public final class TestProtos {
private TestProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
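// Usage sketch (illustrative; not part of the generated output): the message
// classes below are immutable, are created through their nested Builders, and
// round-trip through the generated parseFrom overloads.
//
//   TestProtos.EchoRequestProto req = TestProtos.EchoRequestProto.newBuilder()
//       .setMessage("hello")
//       .build();
//   byte[] wire = req.toByteArray();
//   TestProtos.EchoRequestProto copy = TestProtos.EchoRequestProto.parseFrom(wire);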
public interface EmptyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.EmptyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.common.EmptyRequestProto}
*/
public static final class EmptyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.EmptyRequestProto)
EmptyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use EmptyRequestProto.newBuilder() to construct.
private EmptyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EmptyRequestProto() {
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EmptyRequestProto(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
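// Note (informal): hashCode() always folds in the descriptor hash with
// multiplier 19 and the unknown fields with multiplier 29; messages that
// declare fields (see EchoRequestProto below) additionally mix each present
// field as (37 * hash) + FIELD_NUMBER followed by (53 * hash) + value.hashCode().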
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.EmptyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.EmptyRequestProto)
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
private static final org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto();
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<EmptyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<EmptyRequestProto>() {
@java.lang.Override
public EmptyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new EmptyRequestProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<EmptyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<EmptyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
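// Parser note (informal): the static PARSER field is marked
// @java.lang.Deprecated; parser() is the generated accessor for it. A minimal
// sketch, assuming the thirdparty protobuf runtime is on the classpath:
//
//   org.apache.hadoop.thirdparty.protobuf.Parser<TestProtos.EmptyRequestProto> p =
//       TestProtos.EmptyRequestProto.parser();
//   TestProtos.EmptyRequestProto msg = p.parseFrom(new byte[0]); // an empty payload is a valid empty message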
public interface EmptyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.EmptyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.common.EmptyResponseProto}
*/
public static final class EmptyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.EmptyResponseProto)
EmptyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use EmptyResponseProto.newBuilder() to construct.
private EmptyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EmptyResponseProto() {
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EmptyResponseProto(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.EmptyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.EmptyResponseProto)
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
private static final org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto();
}
public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<EmptyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<EmptyResponseProto>() {
@java.lang.Override
public EmptyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new EmptyResponseProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<EmptyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<EmptyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
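// Usage sketch (illustrative): EchoRequestProto declares `required string
// message = 1`, so build() throws an UninitializedMessageException when the
// field is unset, while buildPartial() returns the incomplete message instead.
//
//   TestProtos.EchoRequestProto ping = TestProtos.EchoRequestProto.newBuilder()
//       .setMessage("ping")
//       .build();
//   assert ping.hasMessage() && "ping".equals(ping.getMessage());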
public interface EchoRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.EchoRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string message = 1;
*/
boolean hasMessage();
/**
* required string message = 1;
*/
java.lang.String getMessage();
/**
* required string message = 1;
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes();
}
/**
* Protobuf type {@code hadoop.common.EchoRequestProto}
*/
public static final class EchoRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.EchoRequestProto)
EchoRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use EchoRequestProto.newBuilder() to construct.
private EchoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EchoRequestProto() {
message_ = "";
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EchoRequestProto(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
message_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
}
private int bitField0_;
public static final int MESSAGE_FIELD_NUMBER = 1;
private volatile java.lang.Object message_;
/**
* required string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* required string message = 1;
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
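// Note (informal): message_ deliberately holds either a java.lang.String or a
// ByteString. getMessage() lazily decodes UTF-8 and caches the decoded String
// only when the bytes are valid UTF-8, while getMessageBytes() converts in the
// opposite direction and caches the ByteString, so repeated calls skip re-encoding.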
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasMessage()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.org.apache.hadoop.shaded.com.uteStringSize(1, message_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;
if (hasMessage() != other.hasMessage()) return false;
if (hasMessage()) {
if (!getMessage()
.equals(other.getMessage())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.EchoRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.org.apache.hadoop.shaded.com.on.EchoRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
message_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.message_ = message_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
if (other.hasMessage()) {
bitField0_ |= 0x00000001;
message_ = other.message_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasMessage()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
* required string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string message = 1;
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
/**
* required string message = 1;
*/
public Builder clearMessage() {
bitField0_ = (bitField0_ & ~0x00000001);
message_ = getDefaultInstance().getMessage();
onChanged();
return this;
}
/**
* required string message = 1;
*/
public Builder setMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.org.apache.hadoop.shaded.com.on.EchoRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.org.apache.hadoop.shaded.com.on.EchoRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoRequestProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<EchoRequestProto>() {
@java.lang.Override
public EchoRequestProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new EchoRequestProto(input, extensionRegistry);
}
};
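// The raw PARSER field is deprecated in generated code in favor of the
// static parser() accessor below; it remains public for older call sites.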
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
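// Illustrative usage sketch (not part of the generated file): build,
// serialize, and re-parse an EchoRequestProto. Only the generated API
// above is assumed.
//
//   TestProtos.EchoRequestProto req = TestProtos.EchoRequestProto.newBuilder()
//       .setMessage("ping")             // required field must be set
//       .build();                       // throws if 'message' were missing
//   byte[] wire = req.toByteArray();    // protobuf wire format
//   TestProtos.EchoRequestProto parsed =
//       TestProtos.EchoRequestProto.parseFrom(wire);
//   assert "ping".equals(parsed.getMessage());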
public interface EchoResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.EchoResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string message = 1;
*/
boolean hasMessage();
/**
* required string message = 1;
*/
java.lang.String getMessage();
/**
* required string message = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes();
}
/**
* Protobuf type {@code hadoop.common.EchoResponseProto}
*/
public static final class EchoResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.EchoResponseProto)
EchoResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use EchoResponseProto.newBuilder() to construct.
private EchoResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EchoResponseProto() {
message_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EchoResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
message_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
}
private int bitField0_;
public static final int MESSAGE_FIELD_NUMBER = 1;
private volatile java.lang.Object message_;
/**
* required string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* required string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
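// message_ holds either a String or a ByteString. The accessors above
// convert lazily and cache the converted form (the String is cached only
// when the bytes are valid UTF-8), so repeated reads avoid re-decoding.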
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasMessage()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
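// memoizedIsInitialized is a tri-state cache: -1 = unknown, 0 = known
// uninitialized, 1 = known initialized, so the required-field check above
// runs at most once per instance.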
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;
if (hasMessage() != other.hasMessage()) return false;
if (hasMessage()) {
if (!getMessage()
.equals(other.getMessage())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
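// Standard protobuf hash recipe: fold in the descriptor, then each present
// field (its tag number, then its value), then the unknown fields; 0 is
// reserved to mean "not yet computed", which makes memoization safe.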
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.EchoResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.EchoResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
message_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.message_ = message_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
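// buildPartial() snapshots the builder: bit 0 of the copied bitField0_
// records whether 'message' was explicitly set on the builder.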
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
if (other.hasMessage()) {
bitField0_ |= 0x00000001;
message_ = other.message_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasMessage()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
* required string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string message = 1;
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
/**
* required string message = 1;
*/
public Builder clearMessage() {
bitField0_ = (bitField0_ & ~0x00000001);
message_ = getDefaultInstance().getMessage();
onChanged();
return this;
}
/**
* required string message = 1;
*/
public Builder setMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<EchoResponseProto>() {
@java.lang.Override
public EchoResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new EchoResponseProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
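// Illustrative sketch (not part of the generated file): writing and reading
// several length-delimited EchoResponseProto messages on one stream, using
// the parseDelimitedFrom helpers above and MessageLite.writeDelimitedTo.
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   TestProtos.EchoResponseProto.newBuilder().setMessage("a").build()
//       .writeDelimitedTo(out);
//   TestProtos.EchoResponseProto.newBuilder().setMessage("b").build()
//       .writeDelimitedTo(out);
//   java.io.ByteArrayInputStream in =
//       new java.io.ByteArrayInputStream(out.toByteArray());
//   TestProtos.EchoResponseProto first =
//       TestProtos.EchoResponseProto.parseDelimitedFrom(in);   // message "a"
//   TestProtos.EchoResponseProto second =
//       TestProtos.EchoResponseProto.parseDelimitedFrom(in);   // message "b"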
public interface OptRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.OptRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string message = 1;
*/
boolean hasMessage();
/**
* optional string message = 1;
*/
java.lang.String getMessage();
/**
* optional string message = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes();
}
/**
* Protobuf type {@code hadoop.common.OptRequestProto}
*/
public static final class OptRequestProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.OptRequestProto)
OptRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use OptRequestProto.newBuilder() to construct.
private OptRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OptRequestProto() {
message_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private OptRequestProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
message_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.Builder.class);
}
private int bitField0_;
public static final int MESSAGE_FIELD_NUMBER = 1;
private volatile java.lang.Object message_;
/**
* optional string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* optional string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
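// Unlike the Echo*Proto messages above, OptRequestProto has no required
// fields, so initialization can never fail: the memoized check collapses
// to true on first call.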
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto) obj;
if (hasMessage() != other.hasMessage()) return false;
if (hasMessage()) {
if (!getMessage()
.equals(other.getMessage())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.OptRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.OptRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
message_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.message_ = message_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance()) return this;
if (other.hasMessage()) {
bitField0_ |= 0x00000001;
message_ = other.message_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
* optional string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string message = 1;
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
/**
* optional string message = 1;
*/
public Builder clearMessage() {
bitField0_ = (bitField0_ & ~0x00000001);
message_ = getDefaultInstance().getMessage();
onChanged();
return this;
}
/**
* optional string message = 1;
*/
public Builder setMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.OptRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.OptRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<OptRequestProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<OptRequestProto>() {
@java.lang.Override
public OptRequestProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new OptRequestProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<OptRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<OptRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
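// Illustrative sketch (not part of the generated file): presence semantics
// of the optional 'message' field.
//
//   TestProtos.OptRequestProto empty =
//       TestProtos.OptRequestProto.newBuilder().build(); // legal: no required fields
//   boolean present = empty.hasMessage();                // false
//   String value = empty.getMessage();                   // "" (proto2 default)
//   TestProtos.OptRequestProto withMsg =
//       empty.toBuilder().setMessage("hello").build();
//   assert withMsg.hasMessage();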
public interface OptResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.OptResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string message = 1;
*/
boolean hasMessage();
/**
* optional string message = 1;
*/
java.lang.String getMessage();
/**
* optional string message = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes();
}
/**
* Protobuf type {@code hadoop.common.OptResponseProto}
*/
public static final class OptResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.OptResponseProto)
OptResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use OptResponseProto.newBuilder() to construct.
private OptResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OptResponseProto() {
message_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private OptResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
message_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.Builder.class);
}
private int bitField0_;
public static final int MESSAGE_FIELD_NUMBER = 1;
private volatile java.lang.Object message_;
/**
* optional string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* optional string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) obj;
if (hasMessage() != other.hasMessage()) return false;
if (hasMessage()) {
if (!getMessage()
.equals(other.getMessage())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.OptResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.OptResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
message_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.message_ = message_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance()) return this;
if (other.hasMessage()) {
bitField0_ |= 0x00000001;
message_ = other.message_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
* optional string message = 1;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string message = 1;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
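// Editorial note: message_ holds either a java.lang.String or a ByteString and converts
// lazily in both directions: getMessage() caches the decoded String only when the bytes
// are valid UTF-8, and getMessageBytes() caches the UTF-8 encoding, so repeated calls on
// either accessor do not re-encode.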
/**
* optional string message = 1;
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
/**
* optional string message = 1;
*/
public Builder clearMessage() {
bitField0_ = (bitField0_ & ~0x00000001);
message_ = getDefaultInstance().getMessage();
onChanged();
return this;
}
/**
* optional string message = 1;
*/
public Builder setMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
message_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.OptResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.OptResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<OptResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<OptResponseProto>() {
@java.lang.Override
public OptResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new OptResponseProto(input, extensionRegistry);
}
};
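// Editorial note: the raw PARSER field is kept for older callers but is deprecated in
// this generation of protobuf code; parser() or getParserForType() is the supported way
// in, e.g. (sketch, `bytes` is a hypothetical byte[]):
//
//   OptResponseProto msg = OptResponseProto.parser().parseFrom(bytes);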
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<OptResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<OptResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
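// Editorial note: a minimal round-trip sketch for the message defined above (variable
// names are illustrative, not part of the generated API surface):
//
//   OptResponseProto resp = OptResponseProto.newBuilder()
//       .setMessage("pong")
//       .build();
//   byte[] payload = resp.toByteArray();
//   OptResponseProto parsed = OptResponseProto.parseFrom(payload);
//   assert parsed.hasMessage() && "pong".equals(parsed.getMessage());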
public interface SleepRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.SleepRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required int32 milliSeconds = 1;
*/
boolean hasMilliSeconds();
/**
* required int32 milliSeconds = 1;
*/
int getMilliSeconds();
}
/**
* Protobuf type {@code hadoop.common.SleepRequestProto}
*/
public static final class SleepRequestProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.SleepRequestProto)
SleepRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SleepRequestProto.newBuilder() to construct.
private SleepRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SleepRequestProto() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SleepRequestProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
milliSeconds_ = input.readInt32();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.Builder.class);
}
private int bitField0_;
public static final int MILLISECONDS_FIELD_NUMBER = 1;
private int milliSeconds_;
/**
* required int32 milliSeconds = 1;
*/
public boolean hasMilliSeconds() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 milliSeconds = 1;
*/
public int getMilliSeconds() {
return milliSeconds_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasMilliSeconds()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
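// Editorial note: memoizedIsInitialized caches the required-field check above:
// -1 means "not yet computed", 0 "missing a required field", 1 "fully initialized",
// so isInitialized() is a single byte read after the first call.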
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(1, milliSeconds_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(1, milliSeconds_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) obj;
if (hasMilliSeconds() != other.hasMilliSeconds()) return false;
if (hasMilliSeconds()) {
if (getMilliSeconds()
!= other.getMilliSeconds()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMilliSeconds()) {
hash = (37 * hash) + MILLISECONDS_FIELD_NUMBER;
hash = (53 * hash) + getMilliSeconds();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
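// Editorial note: this is the standard generated hashing scheme: seed with the descriptor
// hash, fold in each present field as (37 * hash) + fieldNumber then (53 * hash) + valueHash,
// and finish with the unknown fields; memoizedHashCode caches the result, with 0 reserved
// for "not yet computed".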
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.SleepRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.SleepRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
milliSeconds_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.milliSeconds_ = milliSeconds_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance()) return this;
if (other.hasMilliSeconds()) {
setMilliSeconds(other.getMilliSeconds());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasMilliSeconds()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int milliSeconds_ ;
/**
* required int32 milliSeconds = 1;
*/
public boolean hasMilliSeconds() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 milliSeconds = 1;
*/
public int getMilliSeconds() {
return milliSeconds_;
}
/**
* required int32 milliSeconds = 1;
*/
public Builder setMilliSeconds(int value) {
bitField0_ |= 0x00000001;
milliSeconds_ = value;
onChanged();
return this;
}
/**
* required int32 milliSeconds = 1;
*/
public Builder clearMilliSeconds() {
bitField0_ = (bitField0_ & ~0x00000001);
milliSeconds_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepRequestProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<SleepRequestProto>() {
@java.lang.Override
public SleepRequestProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new SleepRequestProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
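// Editorial note: milliSeconds is a required proto2 field, so build() throws
// UninitializedMessageException when it is unset, while buildPartial() skips the check.
// A minimal sketch:
//
//   SleepRequestProto req = SleepRequestProto.newBuilder()
//       .setMilliSeconds(500)
//       .build();                          // ok: required field present
//   // SleepRequestProto.newBuilder().build(); // would throw UninitializedMessageException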
public interface SleepResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.SleepResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.common.SleepResponseProto}
*/
public static final class SleepResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.SleepResponseProto)
SleepResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SleepResponseProto.newBuilder() to construct.
private SleepResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SleepResponseProto() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SleepResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.SleepResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.SleepResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<SleepResponseProto>() {
@java.lang.Override
public SleepResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new SleepResponseProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
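// Editorial note: SleepResponseProto declares no fields; it exists only so the sleep RPC
// has a concrete response type. Apart from unknown fields carried through from the wire,
// every instance is semantically identical to getDefaultInstance().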
public interface SlowPingRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.SlowPingRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required bool shouldSlow = 1;
*/
boolean hasShouldSlow();
/**
* required bool shouldSlow = 1;
*/
boolean getShouldSlow();
}
/**
* Protobuf type {@code hadoop.common.SlowPingRequestProto}
*/
public static final class SlowPingRequestProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.SlowPingRequestProto)
SlowPingRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SlowPingRequestProto.newBuilder() to construct.
private SlowPingRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SlowPingRequestProto() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SlowPingRequestProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
shouldSlow_ = input.readBool();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.Builder.class);
}
private int bitField0_;
public static final int SHOULDSLOW_FIELD_NUMBER = 1;
private boolean shouldSlow_;
/**
* required bool shouldSlow = 1;
*/
public boolean hasShouldSlow() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required bool shouldSlow = 1;
*/
public boolean getShouldSlow() {
return shouldSlow_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasShouldSlow()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeBool(1, shouldSlow_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(1, shouldSlow_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto) obj;
if (hasShouldSlow() != other.hasShouldSlow()) return false;
if (hasShouldSlow()) {
if (getShouldSlow()
!= other.getShouldSlow()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasShouldSlow()) {
hash = (37 * hash) + SHOULDSLOW_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getShouldSlow());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
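// Usage sketch (hypothetical caller code, not part of the generated
// file): every parseFrom overload above delegates to PARSER, e.g.
//
//   byte[] wire = ...;  // a serialized SlowPingRequestProto
//   TestProtos.SlowPingRequestProto req =
//       TestProtos.SlowPingRequestProto.parseFrom(wire);
//   boolean slow = req.getShouldSlow();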
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.SlowPingRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.SlowPingRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
shouldSlow_ = false;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.shouldSlow_ = shouldSlow_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance()) return this;
if (other.hasShouldSlow()) {
setShouldSlow(other.getShouldSlow());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasShouldSlow()) {
return false;
}
return true;
}
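// Required-field check: shouldSlow is declared "required", so this
// builder reports itself uninitialized until setShouldSlow() has been
// called, and build() above throws newUninitializedMessageException
// for such a message.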
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
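// Note: even when parsing fails, any partially parsed message is merged
// in via the finally block before the exception propagates, so fields
// read before the error are preserved in this builder.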
private int bitField0_;
private boolean shouldSlow_ ;
/**
* required bool shouldSlow = 1;
*/
public boolean hasShouldSlow() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required bool shouldSlow = 1;
*/
public boolean getShouldSlow() {
return shouldSlow_;
}
/**
* required bool shouldSlow = 1;
*/
public Builder setShouldSlow(boolean value) {
bitField0_ |= 0x00000001;
shouldSlow_ = value;
onChanged();
return this;
}
/**
* required bool shouldSlow = 1;
*/
public Builder clearShouldSlow() {
bitField0_ = (bitField0_ & ~0x00000001);
shouldSlow_ = false;
onChanged();
return this;
}
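// Presence tracking: bit 0 of bitField0_ records whether shouldSlow was
// explicitly set. setShouldSlow() ORs the bit in, clearShouldSlow()
// masks it out, and hasShouldSlow() just tests it; the boolean payload
// itself is stored separately in shouldSlow_.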
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.SlowPingRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.SlowPingRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SlowPingRequestProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<SlowPingRequestProto>() {
@java.lang.Override
public SlowPingRequestProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new SlowPingRequestProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SlowPingRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SlowPingRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
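// Builder usage sketch (hypothetical caller code, not part of the
// generated file):
//
//   TestProtos.SlowPingRequestProto req =
//       TestProtos.SlowPingRequestProto.newBuilder()
//           .setShouldSlow(true)   // required; build() throws if left unset
//           .build();
//   byte[] wire = req.toByteArray();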
public interface EchoRequestProto2OrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.EchoRequestProto2)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string message = 1;
*/
java.util.List<java.lang.String>
getMessageList();
/**
* repeated string message = 1;
*/
int getMessageCount();
/**
* repeated string message = 1;
*/
java.lang.String getMessage(int index);
/**
* repeated string message = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes(int index);
}
/**
* Protobuf type {@code hadoop.common.EchoRequestProto2}
*/
public static final class EchoRequestProto2 extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.EchoRequestProto2)
EchoRequestProto2OrBuilder {
private static final long serialVersionUID = 0L;
// Use EchoRequestProto2.newBuilder() to construct.
private EchoRequestProto2(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EchoRequestProto2() {
message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EchoRequestProto2(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
message_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
message_.add(bs);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
message_ = message_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
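// Wire-format note: the tag switched on above encodes
// (field_number << 3) | wire_type, so case 10 is field 1 ("message")
// with wire type 2 (length-delimited): (1 << 3) | 2 == 10. A repeated
// string simply appears on the wire as a run of tag-10 records.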
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.Builder.class);
}
public static final int MESSAGE_FIELD_NUMBER = 1;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList message_;
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getMessageList() {
return message_;
}
/**
* repeated string message = 1;
*/
public int getMessageCount() {
return message_.size();
}
/**
* repeated string message = 1;
*/
public java.lang.String getMessage(int index) {
return message_.get(index);
}
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes(int index) {
return message_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < message_.size(); i++) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_.getRaw(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < message_.size(); i++) {
dataSize += computeStringSizeNoTag(message_.getRaw(i));
}
size += dataSize;
size += 1 * getMessageList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
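// Size arithmetic: each element contributes computeStringSizeNoTag (a
// varint length prefix plus the UTF-8 payload) and exactly one tag
// byte, which is where the "1 * getMessageList().size()" term comes
// from; tags for field numbers below 16 always fit in a single byte.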
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2) obj;
if (!getMessageList()
.equals(other.getMessageList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getMessageCount() > 0) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessageList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.EchoRequestProto2}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.EchoRequestProto2)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2OrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
message_ = message_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.message_ = message_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance()) return this;
if (!other.message_.isEmpty()) {
if (message_.isEmpty()) {
message_ = other.message_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureMessageIsMutable();
message_.addAll(other.message_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
private void ensureMessageIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
message_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(message_);
bitField0_ |= 0x00000001;
}
}
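// Copy-on-write: after mergeFrom() the builder may alias the immutable
// message_ list of another message; ensureMessageIsMutable() makes a
// private LazyStringArrayList copy on the first mutation and flags
// ownership via bit 0 of bitField0_.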
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getMessageList() {
return message_.getUnmodifiableView();
}
/**
* repeated string message = 1;
*/
public int getMessageCount() {
return message_.size();
}
/**
* repeated string message = 1;
*/
public java.lang.String getMessage(int index) {
return message_.get(index);
}
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes(int index) {
return message_.getByteString(index);
}
/**
* repeated string message = 1;
*/
public Builder setMessage(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.set(index, value);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder addMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.add(value);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder addAllMessage(
java.lang.Iterable<java.lang.String> values) {
ensureMessageIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, message_);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder clearMessage() {
message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder addMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto2)
}
// @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto2)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoRequestProto2>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<EchoRequestProto2>() {
@java.lang.Override
public EchoRequestProto2 parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new EchoRequestProto2(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoRequestProto2> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoRequestProto2> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
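// Repeated-field usage sketch (hypothetical caller code, not part of
// the generated file):
//
//   TestProtos.EchoRequestProto2 req =
//       TestProtos.EchoRequestProto2.newBuilder()
//           .addMessage("hello")
//           .addAllMessage(java.util.Arrays.asList("a", "b"))
//           .build();
//   int count = req.getMessageCount();   // 3
//   String first = req.getMessage(0);    // "hello"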
public interface EchoResponseProto2OrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.EchoResponseProto2)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string message = 1;
*/
java.util.List<java.lang.String>
getMessageList();
/**
* repeated string message = 1;
*/
int getMessageCount();
/**
* repeated string message = 1;
*/
java.lang.String getMessage(int index);
/**
* repeated string message = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes(int index);
}
/**
* Protobuf type {@code hadoop.common.EchoResponseProto2}
*/
public static final class EchoResponseProto2 extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.EchoResponseProto2)
EchoResponseProto2OrBuilder {
private static final long serialVersionUID = 0L;
// Use EchoResponseProto2.newBuilder() to construct.
private EchoResponseProto2(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EchoResponseProto2() {
message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EchoResponseProto2(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
message_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
message_.add(bs);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
message_ = message_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.Builder.class);
}
public static final int MESSAGE_FIELD_NUMBER = 1;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList message_;
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getMessageList() {
return message_;
}
/**
* repeated string message = 1;
*/
public int getMessageCount() {
return message_.size();
}
/**
* repeated string message = 1;
*/
public java.lang.String getMessage(int index) {
return message_.get(index);
}
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes(int index) {
return message_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < message_.size(); i++) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_.getRaw(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < message_.size(); i++) {
dataSize += computeStringSizeNoTag(message_.getRaw(i));
}
size += dataSize;
size += 1 * getMessageList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) obj;
if (!getMessageList()
.equals(other.getMessageList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getMessageCount() > 0) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessageList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.EchoResponseProto2}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.EchoResponseProto2)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2OrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
message_ = message_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.message_ = message_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance()) return this;
if (!other.message_.isEmpty()) {
if (message_.isEmpty()) {
message_ = other.message_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureMessageIsMutable();
message_.addAll(other.message_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
private void ensureMessageIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
message_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(message_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getMessageList() {
return message_.getUnmodifiableView();
}
/**
* repeated string message = 1;
*/
public int getMessageCount() {
return message_.size();
}
/**
* repeated string message = 1;
*/
public java.lang.String getMessage(int index) {
return message_.get(index);
}
/**
* repeated string message = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes(int index) {
return message_.getByteString(index);
}
/**
* repeated string message = 1;
*/
public Builder setMessage(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.set(index, value);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder addMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.add(value);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder addAllMessage(
java.lang.Iterable<java.lang.String> values) {
ensureMessageIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, message_);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder clearMessage() {
message_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string message = 1;
*/
public Builder addMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto2)
}
// @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto2)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoResponseProto2>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<EchoResponseProto2>() {
@java.lang.Override
public EchoResponseProto2 parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new EchoResponseProto2(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoResponseProto2> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<EchoResponseProto2> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface AddRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.AddRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required int32 param1 = 1;
*/
boolean hasParam1();
/**
* required int32 param1 = 1;
*/
int getParam1();
/**
* required int32 param2 = 2;
*/
boolean hasParam2();
/**
* required int32 param2 = 2;
*/
int getParam2();
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto}
*/
public static final class AddRequestProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto)
AddRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddRequestProto.newBuilder() to construct.
private AddRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddRequestProto() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AddRequestProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
param1_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
param2_ = input.readInt32();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
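// The wire tags handled above decode as (field_number << 3) | wire_type:
//   tag 8  == (1 << 3) | 0  -> field 1 (param1), wire type 0 (varint)
//   tag 16 == (2 << 3) | 0  -> field 2 (param2), wire type 0 (varint)
// A tag of 0 never appears on the wire; readTag() returns 0 at end of
// input, which is what terminates the parse loop.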
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.Builder.class);
}
private int bitField0_;
public static final int PARAM1_FIELD_NUMBER = 1;
private int param1_;
/**
* required int32 param1 = 1;
*/
public boolean hasParam1() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 param1 = 1;
*/
public int getParam1() {
return param1_;
}
public static final int PARAM2_FIELD_NUMBER = 2;
private int param2_;
/**
* required int32 param2 = 2;
*/
public boolean hasParam2() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required int32 param2 = 2;
*/
public int getParam2() {
return param2_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasParam1()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasParam2()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
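// memoizedIsInitialized is a three-state cache: -1 (not yet computed),
// 0 (known uninitialized), 1 (known initialized). Both int32 fields are
// declared `required`, so a message missing either one reports false here.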
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(1, param1_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(2, param2_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.org.apache.hadoop.shaded.com.uteInt32Size(1, param1_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.org.apache.hadoop.shaded.com.uteInt32Size(2, param2_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
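// Worked example (sketch): for param1 = 2 and param2 = 3 the sums above
// come to 4 bytes -- one tag byte plus a one-byte varint payload per
// field -- so the encoded message is 08 02 10 03. Larger or negative
// values need longer varint payloads.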
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto) obj;
if (hasParam1() != other.hasParam1()) return false;
if (hasParam1()) {
if (getParam1()
!= other.getParam1()) return false;
}
if (hasParam2() != other.hasParam2()) return false;
if (hasParam2()) {
if (getParam2()
!= other.getParam2()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasParam1()) {
hash = (37 * hash) + PARAM1_FIELD_NUMBER;
hash = (53 * hash) + getParam1();
}
if (hasParam2()) {
hash = (37 * hash) + PARAM2_FIELD_NUMBER;
hash = (53 * hash) + getParam2();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
param1_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
param2_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.param1_ = param1_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.param2_ = param2_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance()) return this;
if (other.hasParam1()) {
setParam1(other.getParam1());
}
if (other.hasParam2()) {
setParam2(other.getParam2());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasParam1()) {
return false;
}
if (!hasParam2()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int param1_ ;
/**
* required int32 param1 = 1;
*/
public boolean hasParam1() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 param1 = 1;
*/
public int getParam1() {
return param1_;
}
/**
* required int32 param1 = 1;
*/
public Builder setParam1(int value) {
bitField0_ |= 0x00000001;
param1_ = value;
onChanged();
return this;
}
/**
* required int32 param1 = 1;
*/
public Builder clearParam1() {
bitField0_ = (bitField0_ & ~0x00000001);
param1_ = 0;
onChanged();
return this;
}
private int param2_ ;
/**
* required int32 param2 = 2;
*/
public boolean hasParam2() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required int32 param2 = 2;
*/
public int getParam2() {
return param2_;
}
/**
* required int32 param2 = 2;
*/
public Builder setParam2(int value) {
bitField0_ |= 0x00000002;
param2_ = value;
onChanged();
return this;
}
/**
* required int32 param2 = 2;
*/
public Builder clearParam2() {
bitField0_ = (bitField0_ & ~0x00000002);
param2_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddRequestProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddRequestProto>() {
@java.lang.Override
public AddRequestProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new AddRequestProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
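// Example (a sketch, not part of the generated output): round-tripping an
// AddRequestProto. Both fields are `required`, so build() throws
// UninitializedMessageException if either setter is skipped.
//
//   TestProtos.AddRequestProto req = TestProtos.AddRequestProto.newBuilder()
//       .setParam1(2)
//       .setParam2(3)
//       .build();
//   byte[] wire = req.toByteArray();                        // 08 02 10 03
//   TestProtos.AddRequestProto parsed =
//       TestProtos.AddRequestProto.parseFrom(wire);
//   assert parsed.getParam1() == 2 && parsed.getParam2() == 3;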
public interface AddRequestProto2OrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated int32 params = 1;
*/
java.util.List<java.lang.Integer> getParamsList();
/**
* repeated int32 params = 1;
*/
int getParamsCount();
/**
* repeated int32 params = 1;
*/
int getParams(int index);
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2}
*/
public static final class AddRequestProto2 extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2)
AddRequestProto2OrBuilder {
private static final long serialVersionUID = 0L;
// Use AddRequestProto2.newBuilder() to construct.
private AddRequestProto2(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddRequestProto2() {
params_ = emptyIntList();
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AddRequestProto2(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
params_ = newIntList();
mutable_bitField0_ |= 0x00000001;
}
params_.addInt(input.readInt32());
break;
}
case 10: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
params_ = newIntList();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
params_.addInt(input.readInt32());
}
input.popLimit(limit);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
params_.makeImmutable();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
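// The two cases above accept both wire encodings of `repeated int32`:
//   tag 8  == (1 << 3) | 0 -> one unpacked varint element per tag
//   tag 10 == (1 << 3) | 2 -> a packed, length-delimited run of varints
// proto2 writes repeated scalars unpacked by default (see writeTo below),
// but a conforming parser must accept either form, hence both branches.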
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.Builder.class);
}
public static final int PARAMS_FIELD_NUMBER = 1;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList params_;
/**
* repeated int32 params = 1;
*/
public java.util.List<java.lang.Integer>
getParamsList() {
return params_;
}
/**
* repeated int32 params = 1;
*/
public int getParamsCount() {
return params_.size();
}
/**
* repeated int32 params = 1;
*/
public int getParams(int index) {
return params_.getInt(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
for (int i = 0; i < params_.size(); i++) {
output.writeInt32(1, params_.getInt(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < params_.size(); i++) {
dataSize += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.org.apache.hadoop.shaded.com.uteInt32SizeNoTag(params_.getInt(i));
}
size += dataSize;
size += 1 * getParamsList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
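// Worked example (sketch): for params = [1, 2, 3] the loop above yields
// dataSize = 3 (three one-byte varints), plus one tag byte per element
// (1 * 3), for 6 bytes on the wire: 08 01 08 02 08 03.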
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2) obj;
if (!getParamsList()
.equals(other.getParamsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getParamsCount() > 0) {
hash = (37 * hash) + PARAMS_FIELD_NUMBER;
hash = (53 * hash) + getParamsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2OrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
params_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
params_.makeImmutable();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.params_ = params_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance()) return this;
if (!other.params_.isEmpty()) {
if (params_.isEmpty()) {
params_ = other.params_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureParamsIsMutable();
params_.addAll(other.params_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList params_ = emptyIntList();
private void ensureParamsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
params_ = mutableCopy(params_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated int32 params = 1;
*/
public java.util.List<java.lang.Integer>
getParamsList() {
return ((bitField0_ & 0x00000001) != 0) ?
java.util.Collections.unmodifiableList(params_) : params_;
}
/**
* repeated int32 params = 1;
*/
public int getParamsCount() {
return params_.size();
}
/**
* repeated int32 params = 1;
*/
public int getParams(int index) {
return params_.getInt(index);
}
/**
* repeated int32 params = 1;
*/
public Builder setParams(
int index, int value) {
ensureParamsIsMutable();
params_.setInt(index, value);
onChanged();
return this;
}
/**
* repeated int32 params = 1;
*/
public Builder addParams(int value) {
ensureParamsIsMutable();
params_.addInt(value);
onChanged();
return this;
}
/**
* repeated int32 params = 1;
*/
public Builder addAllParams(
java.lang.Iterable<? extends java.lang.Integer> values) {
ensureParamsIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, params_);
onChanged();
return this;
}
/**
* repeated int32 params = 1;
*/
public Builder clearParams() {
params_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2)
}
// @@protoc_insertion_point(class_scope:hadoop.org.apache.hadoop.shaded.com.on.AddRequestProto2)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddRequestProto2>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddRequestProto2>() {
@java.lang.Override
public AddRequestProto2 parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new AddRequestProto2(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddRequestProto2> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddRequestProto2> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
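// Example (a sketch, not part of the generated output): populating the
// repeated field of AddRequestProto2. addAllParams accepts any
// Iterable<? extends Integer>; the values are illustrative.
//
//   TestProtos.AddRequestProto2 req = TestProtos.AddRequestProto2.newBuilder()
//       .addParams(1)
//       .addAllParams(java.util.Arrays.asList(2, 3))
//       .build();                                           // no required fields
//   assert req.getParamsCount() == 3;
//   assert req.getParams(2) == 3;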
public interface AddResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.org.apache.hadoop.shaded.com.on.AddResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required int32 result = 1;
*/
boolean hasResult();
/**
* required int32 result = 1;
*/
int getResult();
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.AddResponseProto}
*/
public static final class AddResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.org.apache.hadoop.shaded.com.on.AddResponseProto)
AddResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddResponseProto.newBuilder() to construct.
private AddResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddResponseProto() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AddResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
result_ = input.readInt32();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.Builder.class);
}
private int bitField0_;
public static final int RESULT_FIELD_NUMBER = 1;
private int result_;
/**
* required int32 result = 1;
*/
public boolean hasResult() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 result = 1;
*/
public int getResult() {
return result_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasResult()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(1, result_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.org.apache.hadoop.shaded.com.uteInt32Size(1, result_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) obj;
if (hasResult() != other.hasResult()) return false;
if (hasResult()) {
if (getResult()
!= other.getResult()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasResult()) {
hash = (37 * hash) + RESULT_FIELD_NUMBER;
hash = (53 * hash) + getResult();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.org.apache.hadoop.shaded.com.on.AddResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.org.apache.hadoop.shaded.com.on.AddResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
result_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.result_ = result_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance()) return this;
if (other.hasResult()) {
setResult(other.getResult());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasResult()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int result_ ;
/**
* required int32 result = 1;
*/
public boolean hasResult() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 result = 1;
*/
public int getResult() {
return result_;
}
/**
* required int32 result = 1;
*/
public Builder setResult(int value) {
bitField0_ |= 0x00000001;
result_ = value;
onChanged();
return this;
}
/**
* required int32 result = 1;
*/
public Builder clearResult() {
bitField0_ = (bitField0_ & ~0x00000001);
result_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.AddResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.AddResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddResponseProto>() {
@java.lang.Override
public AddResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new AddResponseProto(input, extensionRegistry);
}
};
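// Note: the @Deprecated PARSER field above is a legacy entry point retained by
// the protobuf code generator; callers are expected to go through parser() or
// getParserForType() below rather than referencing the field directly.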
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AddResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
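/*
 * Illustrative sketch (not generated code): a round trip through the
 * AddResponseProto API defined above. Variable names are hypothetical; the
 * methods (newBuilder, setResult, build, toByteArray, parseFrom) are the
 * generated ones.
 *
 *   TestProtos.AddResponseProto resp = TestProtos.AddResponseProto.newBuilder()
 *       .setResult(42)   // satisfies "required int32 result = 1"
 *       .build();        // build() would throw if the required field were unset
 *   byte[] bytes = resp.toByteArray();
 *   TestProtos.AddResponseProto parsed = TestProtos.AddResponseProto.parseFrom(bytes);
 *   assert parsed.getResult() == 42;
 */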
public interface ExchangeRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.ExchangeRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated int32 values = 1;
*/
java.util.List<java.lang.Integer> getValuesList();
/**
* repeated int32 values = 1;
*/
int getValuesCount();
/**
* repeated int32 values = 1;
*/
int getValues(int index);
}
/**
* Protobuf type {@code hadoop.common.ExchangeRequestProto}
*/
public static final class ExchangeRequestProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.ExchangeRequestProto)
ExchangeRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ExchangeRequestProto.newBuilder() to construct.
private ExchangeRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ExchangeRequestProto() {
values_ = emptyIntList();
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ExchangeRequestProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
values_ = newIntList();
mutable_bitField0_ |= 0x00000001;
}
values_.addInt(input.readInt32());
break;
}
case 10: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
values_ = newIntList();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
values_.addInt(input.readInt32());
}
input.popLimit(limit);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
values_.makeImmutable(); // C
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
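// Parsing note: "repeated int32 values = 1" can arrive in two wire encodings,
// and the constructor above accepts both. Tag 8 (field 1, wire type 0) is the
// unpacked form, one varint per element; tag 10 (field 1, wire type 2) is the
// packed form, a byte-length prefix followed by a run of varints, which is why
// that branch pushes a read limit and drains values until the limit is reached.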
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.Builder.class);
}
public static final int VALUES_FIELD_NUMBER = 1;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList values_;
/**
* repeated int32 values = 1;
*/
public java.util.List<java.lang.Integer>
getValuesList() {
return values_;
}
/**
* repeated int32 values = 1;
*/
public int getValuesCount() {
return values_.size();
}
/**
* repeated int32 values = 1;
*/
public int getValues(int index) {
return values_.getInt(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < values_.size(); i++) {
output.writeInt32(1, values_.getInt(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < values_.size(); i++) {
dataSize += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32SizeNoTag(values_.getInt(i));
}
size += dataSize;
size += 1 * getValuesList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto) obj;
if (!getValuesList()
.equals(other.getValuesList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getValuesCount() > 0) {
hash = (37 * hash) + VALUES_FIELD_NUMBER;
hash = (53 * hash) + getValuesList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.ExchangeRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.ExchangeRequestProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
values_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
values_.makeImmutable();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.values_ = values_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance()) return this;
if (!other.values_.isEmpty()) {
if (values_.isEmpty()) {
values_ = other.values_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureValuesIsMutable();
values_.addAll(other.values_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList values_ = emptyIntList();
private void ensureValuesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
values_ = mutableCopy(values_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated int32 values = 1;
*/
public java.util.List<java.lang.Integer>
getValuesList() {
return ((bitField0_ & 0x00000001) != 0) ?
java.util.Collections.unmodifiableList(values_) : values_;
}
/**
* repeated int32 values = 1;
*/
public int getValuesCount() {
return values_.size();
}
/**
* repeated int32 values = 1;
*/
public int getValues(int index) {
return values_.getInt(index);
}
/**
* repeated int32 values = 1;
*/
public Builder setValues(
int index, int value) {
ensureValuesIsMutable();
values_.setInt(index, value);
onChanged();
return this;
}
/**
* repeated int32 values = 1;
*/
public Builder addValues(int value) {
ensureValuesIsMutable();
values_.addInt(value);
onChanged();
return this;
}
/**
* repeated int32 values = 1;
*/
public Builder addAllValues(
java.lang.Iterable<? extends java.lang.Integer> values) {
ensureValuesIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, values_);
onChanged();
return this;
}
/**
* repeated int32 values = 1;
*/
public Builder clearValues() {
values_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.ExchangeRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ExchangeRequestProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<ExchangeRequestProto>() {
@java.lang.Override
public ExchangeRequestProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new ExchangeRequestProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ExchangeRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ExchangeRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
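/*
 * Illustrative sketch (not generated code) of the repeated-field builder API
 * above; variable names are hypothetical:
 *
 *   TestProtos.ExchangeRequestProto req = TestProtos.ExchangeRequestProto.newBuilder()
 *       .addValues(1)                                  // append one element
 *       .addAllValues(java.util.Arrays.asList(2, 3))   // append a collection
 *       .build();
 *   java.util.List<java.lang.Integer> values = req.getValuesList();  // [1, 2, 3]
 *
 * While the list is still owned by a mutable builder, Builder.getValuesList()
 * wraps it in Collections.unmodifiableList, so callers cannot mutate builder
 * state through the returned view.
 */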
public interface ExchangeResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.ExchangeResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated int32 values = 1;
*/
java.util.List<java.lang.Integer> getValuesList();
/**
* repeated int32 values = 1;
*/
int getValuesCount();
/**
* repeated int32 values = 1;
*/
int getValues(int index);
}
/**
* Protobuf type {@code hadoop.common.ExchangeResponseProto}
*/
public static final class ExchangeResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.ExchangeResponseProto)
ExchangeResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ExchangeResponseProto.newBuilder() to construct.
private ExchangeResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ExchangeResponseProto() {
values_ = emptyIntList();
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ExchangeResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
values_ = newIntList();
mutable_bitField0_ |= 0x00000001;
}
values_.addInt(input.readInt32());
break;
}
case 10: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
values_ = newIntList();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
values_.addInt(input.readInt32());
}
input.popLimit(limit);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
values_.makeImmutable(); // C
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.Builder.class);
}
public static final int VALUES_FIELD_NUMBER = 1;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList values_;
/**
* repeated int32 values = 1;
*/
public java.util.List<java.lang.Integer>
getValuesList() {
return values_;
}
/**
* repeated int32 values = 1;
*/
public int getValuesCount() {
return values_.size();
}
/**
* repeated int32 values = 1;
*/
public int getValues(int index) {
return values_.getInt(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < values_.size(); i++) {
output.writeInt32(1, values_.getInt(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < values_.size(); i++) {
dataSize += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32SizeNoTag(values_.getInt(i));
}
size += dataSize;
size += 1 * getValuesList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) obj;
if (!getValuesList()
.equals(other.getValuesList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getValuesCount() > 0) {
hash = (37 * hash) + VALUES_FIELD_NUMBER;
hash = (53 * hash) + getValuesList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.ExchangeResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.ExchangeResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
values_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
values_.makeImmutable();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.values_ = values_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance()) return this;
if (!other.values_.isEmpty()) {
if (values_.isEmpty()) {
values_ = other.values_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureValuesIsMutable();
values_.addAll(other.values_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList values_ = emptyIntList();
private void ensureValuesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
values_ = mutableCopy(values_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated int32 values = 1;
*/
public java.util.List<java.lang.Integer>
getValuesList() {
return ((bitField0_ & 0x00000001) != 0) ?
java.util.Collections.unmodifiableList(values_) : values_;
}
/**
* repeated int32 values = 1;
*/
public int getValuesCount() {
return values_.size();
}
/**
* repeated int32 values = 1;
*/
public int getValues(int index) {
return values_.getInt(index);
}
/**
* repeated int32 values = 1;
*/
public Builder setValues(
int index, int value) {
ensureValuesIsMutable();
values_.setInt(index, value);
onChanged();
return this;
}
/**
* repeated int32 values = 1;
*/
public Builder addValues(int value) {
ensureValuesIsMutable();
values_.addInt(value);
onChanged();
return this;
}
/**
* repeated int32 values = 1;
*/
public Builder addAllValues(
java.lang.Iterable<? extends java.lang.Integer> values) {
ensureValuesIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, values_);
onChanged();
return this;
}
/**
* repeated int32 values = 1;
*/
public Builder clearValues() {
values_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.ExchangeResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ExchangeResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<ExchangeResponseProto>() {
@java.lang.Override
public ExchangeResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new ExchangeResponseProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ExchangeResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ExchangeResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface AuthMethodResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.AuthMethodResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required int32 code = 1;
*/
boolean hasCode();
/**
* required int32 code = 1;
*/
int getCode();
/**
* required string mechanismName = 2;
*/
boolean hasMechanismName();
/**
* required string mechanismName = 2;
*/
java.lang.String getMechanismName();
/**
* required string mechanismName = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMechanismNameBytes();
}
/**
* Protobuf type {@code hadoop.common.AuthMethodResponseProto}
*/
public static final class AuthMethodResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.AuthMethodResponseProto)
AuthMethodResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AuthMethodResponseProto.newBuilder() to construct.
private AuthMethodResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AuthMethodResponseProto() {
mechanismName_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AuthMethodResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
code_ = input.readInt32();
break;
}
case 18: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
mechanismName_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.Builder.class);
}
private int bitField0_;
public static final int CODE_FIELD_NUMBER = 1;
private int code_;
/**
* required int32 code = 1;
*/
public boolean hasCode() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 code = 1;
*/
public int getCode() {
return code_;
}
public static final int MECHANISMNAME_FIELD_NUMBER = 2;
private volatile java.lang.Object mechanismName_;
/**
* required string mechanismName = 2;
*/
public boolean hasMechanismName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string mechanismName = 2;
*/
public java.lang.String getMechanismName() {
java.lang.Object ref = mechanismName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
mechanismName_ = s;
}
return s;
}
}
/**
* required string mechanismName = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMechanismNameBytes() {
java.lang.Object ref = mechanismName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mechanismName_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
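// Implementation note: mechanismName_ holds either a java.lang.String or a
// ByteString. getMechanismName() decodes a ByteString to UTF-8 on first access
// and, when the bytes are valid UTF-8, caches the decoded String back into the
// field; getMechanismNameBytes() performs the inverse conversion. Repeated
// calls in either representation are therefore cheap after the first one.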
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasCode()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMechanismName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
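// isInitialized() memoizes its verdict in memoizedIsInitialized: -1 means not
// yet computed, 0 known-uninitialized, 1 known-initialized. Both "code" and
// "mechanismName" are required fields, so a message missing either one reports
// false here, which in turn makes Builder.build() throw for such messages.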
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(1, code_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, mechanismName_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(1, code_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, mechanismName_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) obj;
if (hasCode() != other.hasCode()) return false;
if (hasCode()) {
if (getCode()
!= other.getCode()) return false;
}
if (hasMechanismName() != other.hasMechanismName()) return false;
if (hasMechanismName()) {
if (!getMechanismName()
.equals(other.getMechanismName())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCode()) {
hash = (37 * hash) + CODE_FIELD_NUMBER;
hash = (53 * hash) + getCode();
}
if (hasMechanismName()) {
hash = (37 * hash) + MECHANISMNAME_FIELD_NUMBER;
hash = (53 * hash) + getMechanismName().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.AuthMethodResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.AuthMethodResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
code_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
mechanismName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.code_ = code_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.mechanismName_ = mechanismName_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance()) return this;
if (other.hasCode()) {
setCode(other.getCode());
}
if (other.hasMechanismName()) {
bitField0_ |= 0x00000002;
mechanismName_ = other.mechanismName_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasCode()) {
return false;
}
if (!hasMechanismName()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
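// Presence tracking: each bit of bitField0_ records whether the corresponding
// field has been explicitly set (bit 0 -> code, bit 1 -> mechanismName).
// hasXxx() tests the bit, setXxx() sets it, and clearXxx() clears it together
// with the field value.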
private int code_ ;
/**
* required int32 code = 1;
*/
public boolean hasCode() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int32 code = 1;
*/
public int getCode() {
return code_;
}
/**
* required int32 code = 1;
*/
public Builder setCode(int value) {
bitField0_ |= 0x00000001;
code_ = value;
onChanged();
return this;
}
/**
* required int32 code = 1;
*/
public Builder clearCode() {
bitField0_ = (bitField0_ & ~0x00000001);
code_ = 0;
onChanged();
return this;
}
private java.lang.Object mechanismName_ = "";
/**
* required string mechanismName = 2;
*/
public boolean hasMechanismName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string mechanismName = 2;
*/
public java.lang.String getMechanismName() {
java.lang.Object ref = mechanismName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
mechanismName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string mechanismName = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMechanismNameBytes() {
java.lang.Object ref = mechanismName_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mechanismName_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string mechanismName = 2;
*/
public Builder setMechanismName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
mechanismName_ = value;
onChanged();
return this;
}
/**
* required string mechanismName = 2;
*/
public Builder clearMechanismName() {
bitField0_ = (bitField0_ & ~0x00000002);
mechanismName_ = getDefaultInstance().getMechanismName();
onChanged();
return this;
}
/**
* required string mechanismName = 2;
*/
public Builder setMechanismNameBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
mechanismName_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.AuthMethodResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.AuthMethodResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AuthMethodResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<AuthMethodResponseProto>() {
@java.lang.Override
public AuthMethodResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new AuthMethodResponseProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AuthMethodResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<AuthMethodResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
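// ---------------------------------------------------------------------------
// Usage sketch (editorial note, not emitted by protoc): a minimal example of
// building and round-tripping an AuthMethodResponseProto. Both fields are
// required, so build() throws UninitializedMessageException if either is
// unset; the literal values below are illustrative only.
//
//   TestProtos.AuthMethodResponseProto msg =
//       TestProtos.AuthMethodResponseProto.newBuilder()
//           .setCode(0)                         // required int32 code = 1
//           .setMechanismName("DIGEST-MD5")     // required string mechanismName = 2
//           .build();
//   byte[] bytes = msg.toByteArray();           // inherited from AbstractMessageLite
//   TestProtos.AuthMethodResponseProto parsed =
//       TestProtos.AuthMethodResponseProto.parseFrom(bytes);
// ---------------------------------------------------------------------------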
public interface UserResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.UserResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string user = 1;
*/
boolean hasUser();
/**
* required string user = 1;
*/
java.lang.String getUser();
/**
* required string user = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes();
}
/**
* Protobuf type {@code hadoop.common.UserResponseProto}
*/
public static final class UserResponseProto extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.UserResponseProto)
UserResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserResponseProto.newBuilder() to construct.
private UserResponseProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserResponseProto() {
user_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private UserResponseProto(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
user_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_UserResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_UserResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.Builder.class);
}
private int bitField0_;
public static final int USER_FIELD_NUMBER = 1;
private volatile java.lang.Object user_;
/**
* required string user = 1;
*/
public boolean hasUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string user = 1;
*/
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
user_ = s;
}
return s;
}
}
/**
* required string user = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
user_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
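// Tri-state cache for isInitialized(): -1 = not yet computed,
// 0 = known uninitialized, 1 = known initialized.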
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasUser()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, user_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, user_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto) obj;
if (hasUser() != other.hasUser()) return false;
if (hasUser()) {
if (!getUser()
.equals(other.getUser())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUser()) {
hash = (37 * hash) + USER_FIELD_NUMBER;
hash = (53 * hash) + getUser().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.UserResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.UserResponseProto)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_UserResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_UserResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
user_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_UserResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.user_ = user_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance()) return this;
if (other.hasUser()) {
bitField0_ |= 0x00000001;
user_ = other.user_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasUser()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object user_ = "";
/**
* required string user = 1;
*/
public boolean hasUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string user = 1;
*/
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
user_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string user = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
user_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string user = 1;
*/
public Builder setUser(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
user_ = value;
onChanged();
return this;
}
/**
* required string user = 1;
*/
public Builder clearUser() {
bitField0_ = (bitField0_ & ~0x00000001);
user_ = getDefaultInstance().getUser();
onChanged();
return this;
}
/**
* required string user = 1;
*/
public Builder setUserBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
user_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.UserResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.UserResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<UserResponseProto>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<UserResponseProto>() {
@java.lang.Override
public UserResponseProto parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new UserResponseProto(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<UserResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<UserResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
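// ---------------------------------------------------------------------------
// Usage sketch (editorial note, not emitted by protoc): length-delimited
// framing lets several UserResponseProto messages share one stream. The
// stream variables are illustrative, and writeDelimitedTo(...) is assumed
// from the protobuf MessageLite API rather than shown in this file.
//
//   TestProtos.UserResponseProto resp =
//       TestProtos.UserResponseProto.newBuilder()
//           .setUser("alice")                   // required string user = 1
//           .build();
//   resp.writeDelimitedTo(out);                 // java.io.OutputStream out
//   TestProtos.UserResponseProto readBack =
//       TestProtos.UserResponseProto.parseDelimitedFrom(in);  // java.io.InputStream in
// ---------------------------------------------------------------------------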
public interface SleepRequestProto2OrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.SleepRequestProto2)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional int64 sleep_time = 1;
*/
boolean hasSleepTime();
/**
* optional int64 sleep_time = 1;
*/
long getSleepTime();
}
/**
* Protobuf type {@code hadoop.common.SleepRequestProto2}
*/
public static final class SleepRequestProto2 extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.SleepRequestProto2)
SleepRequestProto2OrBuilder {
private static final long serialVersionUID = 0L;
// Use SleepRequestProto2.newBuilder() to construct.
private SleepRequestProto2(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SleepRequestProto2() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SleepRequestProto2(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
sleepTime_ = input.readInt64();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.Builder.class);
}
private int bitField0_;
public static final int SLEEP_TIME_FIELD_NUMBER = 1;
private long sleepTime_;
/**
* optional int64 sleep_time = 1;
*/
public boolean hasSleepTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional int64 sleep_time = 1;
*/
public long getSleepTime() {
return sleepTime_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt64(1, sleepTime_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(1, sleepTime_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2) obj;
if (hasSleepTime() != other.hasSleepTime()) return false;
if (hasSleepTime()) {
if (getSleepTime()
!= other.getSleepTime()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSleepTime()) {
hash = (37 * hash) + SLEEP_TIME_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getSleepTime());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.SleepRequestProto2}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.SleepRequestProto2)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2OrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
sleepTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto2_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.sleepTime_ = sleepTime_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.getDefaultInstance()) return this;
if (other.hasSleepTime()) {
setSleepTime(other.getSleepTime());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private long sleepTime_ ;
/**
* optional int64 sleep_time = 1;
*/
public boolean hasSleepTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional int64 sleep_time = 1;
*/
public long getSleepTime() {
return sleepTime_;
}
/**
* optional int64 sleep_time = 1;
*/
public Builder setSleepTime(long value) {
bitField0_ |= 0x00000001;
sleepTime_ = value;
onChanged();
return this;
}
/**
* optional int64 sleep_time = 1;
*/
public Builder clearSleepTime() {
bitField0_ = (bitField0_ & ~0x00000001);
sleepTime_ = 0L;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto2)
}
// @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto2)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepRequestProto2>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<SleepRequestProto2>() {
@java.lang.Override
public SleepRequestProto2 parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new SleepRequestProto2(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepRequestProto2> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepRequestProto2> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
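// Usage sketch (illustrative, not part of the generated source): constructing
// a SleepRequestProto2 through its builder. The 500L is an arbitrary example
// value; the unit of sleep_time is whatever the test harness assumes
// (presumably milliseconds).
//
//   TestProtos.SleepRequestProto2 req =
//       TestProtos.SleepRequestProto2.newBuilder()
//           .setSleepTime(500L)
//           .build();
//   assert req.hasSleepTime() && req.getSleepTime() == 500L;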
public interface SleepResponseProto2OrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.SleepResponseProto2)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional int64 receive_time = 1;
*/
boolean hasReceiveTime();
/**
* optional int64 receive_time = 1;
*/
long getReceiveTime();
/**
* optional int64 response_time = 2;
*/
boolean hasResponseTime();
/**
* optional int64 response_time = 2;
*/
long getResponseTime();
}
/**
* Protobuf type {@code hadoop.common.SleepResponseProto2}
*/
public static final class SleepResponseProto2 extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.SleepResponseProto2)
SleepResponseProto2OrBuilder {
private static final long serialVersionUID = 0L;
// Use SleepResponseProto2.newBuilder() to construct.
private SleepResponseProto2(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SleepResponseProto2() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SleepResponseProto2(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
receiveTime_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
responseTime_ = input.readInt64();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
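// Wire-format note: in the tag-dispatch loop above, tag 8 is field 1 as a
// varint ((1 << 3) | 0 -> receive_time) and tag 16 is field 2 as a varint
// ((2 << 3) | 0 -> response_time); tag 0 marks end of input, and anything
// else is routed to parseUnknownField so unknown fields round-trip intact.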
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.Builder.class);
}
private int bitField0_;
public static final int RECEIVE_TIME_FIELD_NUMBER = 1;
private long receiveTime_;
/**
* optional int64 receive_time = 1;
*/
public boolean hasReceiveTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional int64 receive_time = 1;
*/
public long getReceiveTime() {
return receiveTime_;
}
public static final int RESPONSE_TIME_FIELD_NUMBER = 2;
private long responseTime_;
/**
* optional int64 response_time = 2;
*/
public boolean hasResponseTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int64 response_time = 2;
*/
public long getResponseTime() {
return responseTime_;
}
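// isInitialized() below memoizes its result in memoizedIsInitialized:
// -1 = not yet computed, 0 = false, 1 = true. Both fields are optional, so
// there are no required-field checks and the first call simply records
// success.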
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt64(1, receiveTime_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt64(2, responseTime_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(1, receiveTime_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(2, responseTime_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 other = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2) obj;
if (hasReceiveTime() != other.hasReceiveTime()) return false;
if (hasReceiveTime()) {
if (getReceiveTime()
!= other.getReceiveTime()) return false;
}
if (hasResponseTime() != other.hasResponseTime()) return false;
if (hasResponseTime()) {
if (getResponseTime()
!= other.getResponseTime()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
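// equals() above compares presence before value for each optional field,
// plus the unknown-field set; hashCode() below mixes the descriptor and each
// present field via Internal.hashLong (the long XOR-folded to an int),
// caching the result in memoizedHashCode, where 0 means "not yet computed".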
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasReceiveTime()) {
hash = (37 * hash) + RECEIVE_TIME_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getReceiveTime());
}
if (hasResponseTime()) {
hash = (37 * hash) + RESPONSE_TIME_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getResponseTime());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.SleepResponseProto2}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.SleepResponseProto2)
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2OrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto2_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.class, org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
receiveTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
responseTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto2_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 build() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 result = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.receiveTime_ = receiveTime_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.responseTime_ = responseTime_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance()) return this;
if (other.hasReceiveTime()) {
setReceiveTime(other.getReceiveTime());
}
if (other.hasResponseTime()) {
setResponseTime(other.getResponseTime());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private long receiveTime_ ;
/**
* optional int64 receive_time = 1;
*/
public boolean hasReceiveTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional int64 receive_time = 1;
*/
public long getReceiveTime() {
return receiveTime_;
}
/**
* optional int64 receive_time = 1;
*/
public Builder setReceiveTime(long value) {
bitField0_ |= 0x00000001;
receiveTime_ = value;
onChanged();
return this;
}
/**
* optional int64 receive_time = 1;
*/
public Builder clearReceiveTime() {
bitField0_ = (bitField0_ & ~0x00000001);
receiveTime_ = 0L;
onChanged();
return this;
}
private long responseTime_ ;
/**
* optional int64 response_time = 2;
*/
public boolean hasResponseTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int64 response_time = 2;
*/
public long getResponseTime() {
return responseTime_;
}
/**
* optional int64 response_time = 2;
*/
public Builder setResponseTime(long value) {
bitField0_ |= 0x00000002;
responseTime_ = value;
onChanged();
return this;
}
/**
* optional int64 response_time = 2;
*/
public Builder clearResponseTime() {
bitField0_ = (bitField0_ & ~0x00000002);
responseTime_ = 0L;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto2)
}
// @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto2)
private static final org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepResponseProto2>
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<SleepResponseProto2>() {
@java.lang.Override
public SleepResponseProto2 parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new SleepResponseProto2(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepResponseProto2> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<SleepResponseProto2> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
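// Round-trip sketch (illustrative): serialize a SleepResponseProto2 and
// parse it back; unset optional fields are simply absent from the wire
// bytes. The timestamps are arbitrary example values.
//
//   TestProtos.SleepResponseProto2 resp =
//       TestProtos.SleepResponseProto2.newBuilder()
//           .setReceiveTime(1000L)
//           .setResponseTime(1005L)
//           .build();
//   byte[] wire = resp.toByteArray();
//   TestProtos.SleepResponseProto2 parsed =
//       TestProtos.SleepResponseProto2.parseFrom(wire);
//   assert parsed.getResponseTime() == 1005L;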
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_EmptyRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_EmptyResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_EchoRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_EchoResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_OptRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_OptRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_OptResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_OptResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_SleepRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_SleepResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_SlowPingRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_EchoRequestProto2_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_EchoResponseProto2_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_AddRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_AddRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_AddRequestProto2_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_AddResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_AddResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_ExchangeRequestProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_ExchangeResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_UserResponseProto_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_UserResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_SleepRequestProto2_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_SleepResponseProto2_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable;
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\ntest.proto\022\rhadoop.org.apache.hadoop.shaded.com.on\"\023\n\021EmptyRequ" +
"estProto\"\024\n\022EmptyResponseProto\"#\n\020EchoRe" +
"questProto\022\017\n\007message\030\001 \002(\t\"$\n\021EchoRespo" +
"nseProto\022\017\n\007message\030\001 \002(\t\"\"\n\017OptRequestP" +
"roto\022\017\n\007message\030\001 \001(\t\"#\n\020OptResponseProt" +
"o\022\017\n\007message\030\001 \001(\t\")\n\021SleepRequestProto\022" +
"\024\n\014milliSeconds\030\001 \002(\005\"\024\n\022SleepResponsePr" +
"oto\"*\n\024SlowPingRequestProto\022\022\n\nshouldSlo" +
"w\030\001 \002(\010\"$\n\021EchoRequestProto2\022\017\n\007message\030" +
"\001 \003(\t\"%\n\022EchoResponseProto2\022\017\n\007message\030\001" +
" \003(\t\"1\n\017AddRequestProto\022\016\n\006param1\030\001 \002(\005\022" +
"\016\n\006param2\030\002 \002(\005\"\"\n\020AddRequestProto2\022\016\n\006p" +
"arams\030\001 \003(\005\"\"\n\020AddResponseProto\022\016\n\006resul" +
"t\030\001 \002(\005\"&\n\024ExchangeRequestProto\022\016\n\006value" +
"s\030\001 \003(\005\"\'\n\025ExchangeResponseProto\022\016\n\006valu" +
"es\030\001 \003(\005\">\n\027AuthMethodResponseProto\022\014\n\004c" +
"ode\030\001 \002(\005\022\025\n\rmechanismName\030\002 \002(\t\"!\n\021User" +
"ResponseProto\022\014\n\004user\030\001 \002(\t\"(\n\022SleepRequ" +
"estProto2\022\022\n\nsleep_time\030\001 \001(\003\"B\n\023SleepRe" +
"sponseProto2\022\024\n\014receive_time\030\001 \001(\003\022\025\n\rre" +
"sponse_time\030\002 \001(\003B/\n\036org.apache.hadoop.i" +
"pc.protobufB\nTestProtos\240\001\001"
};
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
internal_static_hadoop_common_EmptyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_EmptyRequestProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_common_EmptyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_EmptyResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_common_EchoRequestProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_EchoRequestProto_descriptor,
new java.lang.String[] { "Message", });
internal_static_hadoop_common_EchoResponseProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_EchoResponseProto_descriptor,
new java.lang.String[] { "Message", });
internal_static_hadoop_common_OptRequestProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_common_OptRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_OptRequestProto_descriptor,
new java.lang.String[] { "Message", });
internal_static_hadoop_common_OptResponseProto_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hadoop_common_OptResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_OptResponseProto_descriptor,
new java.lang.String[] { "Message", });
internal_static_hadoop_common_SleepRequestProto_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_SleepRequestProto_descriptor,
new java.lang.String[] { "MilliSeconds", });
internal_static_hadoop_common_SleepResponseProto_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_SleepResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_common_SlowPingRequestProto_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_SlowPingRequestProto_descriptor,
new java.lang.String[] { "ShouldSlow", });
internal_static_hadoop_common_EchoRequestProto2_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_EchoRequestProto2_descriptor,
new java.lang.String[] { "Message", });
internal_static_hadoop_common_EchoResponseProto2_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_EchoResponseProto2_descriptor,
new java.lang.String[] { "Message", });
internal_static_hadoop_common_AddRequestProto_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_hadoop_common_AddRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_AddRequestProto_descriptor,
new java.lang.String[] { "Param1", "Param2", });
internal_static_hadoop_common_AddRequestProto2_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_AddRequestProto2_descriptor,
new java.lang.String[] { "Params", });
internal_static_hadoop_common_AddResponseProto_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_hadoop_common_AddResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_AddResponseProto_descriptor,
new java.lang.String[] { "Result", });
internal_static_hadoop_common_ExchangeRequestProto_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_ExchangeRequestProto_descriptor,
new java.lang.String[] { "Values", });
internal_static_hadoop_common_ExchangeResponseProto_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_ExchangeResponseProto_descriptor,
new java.lang.String[] { "Values", });
internal_static_hadoop_common_AuthMethodResponseProto_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_AuthMethodResponseProto_descriptor,
new java.lang.String[] { "Code", "MechanismName", });
internal_static_hadoop_common_UserResponseProto_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_hadoop_common_UserResponseProto_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_UserResponseProto_descriptor,
new java.lang.String[] { "User", });
internal_static_hadoop_common_SleepRequestProto2_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_SleepRequestProto2_descriptor,
new java.lang.String[] { "SleepTime", });
internal_static_hadoop_common_SleepResponseProto2_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_SleepResponseProto2_descriptor,
new java.lang.String[] { "ReceiveTime", "ResponseTime", });
}
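// The static initializer above decodes descriptorData (the serialized
// FileDescriptorProto for test.proto) and binds each message type, in
// declaration order (indices 0..19), to its FieldAccessorTable. Descriptors
// remain inspectable at runtime, e.g. (illustrative):
//
//   org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor d =
//       TestProtos.SleepResponseProto2.getDescriptor();
//   for (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor f : d.getFields()) {
//     System.out.println(f.getNumber() + " " + f.getName());  // 1 receive_time, 2 response_time
//   }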
// @@protoc_insertion_point(outer_class_scope)
}