// org.apache.hadoop.ipc.protobuf.RpcHeaderProtos (artifact-listing header: Maven / Gradle / Ivy)
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: RpcHeader.proto
package org.apache.hadoop.ipc.protobuf;
public final class RpcHeaderProtos {
private RpcHeaderProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf enum {@code hadoop.common.RpcKindProto}
*
*
**
* RpcKind determine the rpcEngine and the serialization of the rpc request
*
*/
public enum RpcKindProto
implements com.google.protobuf.ProtocolMessageEnum {
/**
* RPC_BUILTIN = 0;
*
*
* Used for built in calls by tests
*
*/
RPC_BUILTIN(0, 0),
/**
* RPC_WRITABLE = 1;
*
*
* Use WritableRpcEngine
*
*/
RPC_WRITABLE(1, 1),
/**
* RPC_PROTOCOL_BUFFER = 2;
*
*
* Use ProtobufRpcEngine
*
*/
RPC_PROTOCOL_BUFFER(2, 2),
;
/**
* RPC_BUILTIN = 0;
*
*
* Used for built in calls by tests
*
*/
public static final int RPC_BUILTIN_VALUE = 0;
/**
* RPC_WRITABLE = 1;
*
*
* Use WritableRpcEngine
*
*/
public static final int RPC_WRITABLE_VALUE = 1;
/**
* RPC_PROTOCOL_BUFFER = 2;
*
*
* Use ProtobufRpcEngine
*
*/
public static final int RPC_PROTOCOL_BUFFER_VALUE = 2;
public final int getNumber() { return value; }
public static RpcKindProto valueOf(int value) {
switch (value) {
case 0: return RPC_BUILTIN;
case 1: return RPC_WRITABLE;
case 2: return RPC_PROTOCOL_BUFFER;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap() {
public RpcKindProto findValueByNumber(int number) {
return RpcKindProto.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.getDescriptor().getEnumTypes().get(0);
}
private static final RpcKindProto[] VALUES = values();
public static RpcKindProto valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int index;
private final int value;
private RpcKindProto(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcKindProto)
}
public interface RPCTraceInfoProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int64 traceId = 1;
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
boolean hasTraceId();
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
long getTraceId();
// optional int64 parentId = 2;
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
boolean hasParentId();
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
long getParentId();
}
/**
* Protobuf type {@code hadoop.common.RPCTraceInfoProto}
*
*
**
* Used to pass through the information necessary to continue
* a trace after an RPC is made. All we need is the traceid
* (so we know the overarching trace this message is a part of), and
* the id of the current span when this message was sent, so we know
* what span caused the new span we will create when this message is received.
*
*/
public static final class RPCTraceInfoProto extends
com.google.protobuf.GeneratedMessage
implements RPCTraceInfoProtoOrBuilder {
// Use RPCTraceInfoProto.newBuilder() to construct.
private RPCTraceInfoProto(com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RPCTraceInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RPCTraceInfoProto defaultInstance;
public static RPCTraceInfoProto getDefaultInstance() {
return defaultInstance;
}
public RPCTraceInfoProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RPCTraceInfoProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
}
public static com.google.protobuf.Parser PARSER =
new com.google.protobuf.AbstractParser() {
public RPCTraceInfoProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RPCTraceInfoProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser getParserForType() {
return PARSER;
}
private int bitField0_;
// optional int64 traceId = 1;
public static final int TRACEID_FIELD_NUMBER = 1;
private long traceId_;
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
public long getTraceId() {
return traceId_;
}
// optional int64 parentId = 2;
public static final int PARENTID_FIELD_NUMBER = 2;
private long parentId_;
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
public long getParentId() {
return parentId_;
}
private void initFields() {
traceId_ = 0L;
parentId_ = 0L;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, traceId_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, parentId_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, traceId_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, parentId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) obj;
boolean result = true;
result = result && (hasTraceId() == other.hasTraceId());
if (hasTraceId()) {
result = result && (getTraceId()
== other.getTraceId());
}
result = result && (hasParentId() == other.hasParentId());
if (hasParentId()) {
result = result && (getParentId()
== other.getParentId());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasTraceId()) {
hash = (37 * hash) + TRACEID_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getTraceId());
}
if (hasParentId()) {
hash = (37 * hash) + PARENTID_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getParentId());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.RPCTraceInfoProto}
*
*
**
* Used to pass through the information necessary to continue
* a trace after an RPC is made. All we need is the traceid
* (so we know the overarching trace this message is a part of), and
* the id of the current span when this message was sent, so we know
* what span caused the new span we will create when this message is received.
*
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
traceId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
parentId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.traceId_ = traceId_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.parentId_ = parentId_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) return this;
if (other.hasTraceId()) {
setTraceId(other.getTraceId());
}
if (other.hasParentId()) {
setParentId(other.getParentId());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional int64 traceId = 1;
private long traceId_ ;
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
public long getTraceId() {
return traceId_;
}
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
public Builder setTraceId(long value) {
bitField0_ |= 0x00000001;
traceId_ = value;
onChanged();
return this;
}
/**
* optional int64 traceId = 1;
*
*
* parentIdHigh
*
*/
public Builder clearTraceId() {
bitField0_ = (bitField0_ & ~0x00000001);
traceId_ = 0L;
onChanged();
return this;
}
// optional int64 parentId = 2;
private long parentId_ ;
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
public long getParentId() {
return parentId_;
}
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
public Builder setParentId(long value) {
bitField0_ |= 0x00000002;
parentId_ = value;
onChanged();
return this;
}
/**
* optional int64 parentId = 2;
*
*
* parentIdLow
*
*/
public Builder clearParentId() {
bitField0_ = (bitField0_ & ~0x00000002);
parentId_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RPCTraceInfoProto)
}
static {
defaultInstance = new RPCTraceInfoProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.common.RPCTraceInfoProto)
}
public interface RPCCallerContextProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string context = 1;
/**
* required string context = 1;
*/
boolean hasContext();
/**
* required string context = 1;
*/
java.lang.String getContext();
/**
* required string context = 1;
*/
com.google.protobuf.ByteString
getContextBytes();
// optional bytes signature = 2;
/**
* optional bytes signature = 2;
*/
boolean hasSignature();
/**
* optional bytes signature = 2;
*/
com.google.protobuf.ByteString getSignature();
}
/**
 * Protobuf type {@code hadoop.common.RPCCallerContextProto}
 *
 * <pre>
 **
 * Used to pass through the call context entry after an RPC is made.
 * </pre>
 */
public static final class RPCCallerContextProto extends
com.google.protobuf.GeneratedMessage
implements RPCCallerContextProtoOrBuilder {
// Use RPCCallerContextProto.newBuilder() to construct.
private RPCCallerContextProto(com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RPCCallerContextProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RPCCallerContextProto defaultInstance;
public static RPCCallerContextProto getDefaultInstance() {
return defaultInstance;
}
public RPCCallerContextProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RPCCallerContextProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
context_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
signature_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder.class);
}
public static com.google.protobuf.Parser PARSER =
new com.google.protobuf.AbstractParser() {
public RPCCallerContextProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RPCCallerContextProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser getParserForType() {
return PARSER;
}
private int bitField0_;
// required string context = 1;
public static final int CONTEXT_FIELD_NUMBER = 1;
private java.lang.Object context_;
/**
* required string context = 1;
*/
public boolean hasContext() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string context = 1;
*/
public java.lang.String getContext() {
java.lang.Object ref = context_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
context_ = s;
}
return s;
}
}
/**
* required string context = 1;
*/
public com.google.protobuf.ByteString
getContextBytes() {
java.lang.Object ref = context_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
context_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bytes signature = 2;
public static final int SIGNATURE_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString signature_;
/**
* optional bytes signature = 2;
*/
public boolean hasSignature() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional bytes signature = 2;
*/
public com.google.protobuf.ByteString getSignature() {
return signature_;
}
private void initFields() {
context_ = "";
signature_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasContext()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getContextBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, signature_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getContextBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, signature_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) obj;
boolean result = true;
result = result && (hasContext() == other.hasContext());
if (hasContext()) {
result = result && getContext()
.equals(other.getContext());
}
result = result && (hasSignature() == other.hasSignature());
if (hasSignature()) {
result = result && getSignature()
.equals(other.getSignature());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasContext()) {
hash = (37 * hash) + CONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getContext().hashCode();
}
if (hasSignature()) {
hash = (37 * hash) + SIGNATURE_FIELD_NUMBER;
hash = (53 * hash) + getSignature().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hadoop.common.RPCCallerContextProto}
 *
 * <pre>
 **
 * Used to pass through the call context entry after an RPC is made.
 * </pre>
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
context_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
signature_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without checking required fields. Copies each field
// value unconditionally and transfers only the has-bits that are set in
// the builder's bitField0_ into the message's bitField0_.
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.context_ = context_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.signature_ = signature_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge: dispatches to the typed overload when 'other' is an
// RPCCallerContextProto, otherwise falls back to the reflective merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: fields set in 'other' overwrite this builder's
// values. 'context' copies the raw Object (String or ByteString) directly
// to avoid a UTF-8 round trip; 'signature' goes through the setter.
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance()) return this;
if (other.hasContext()) {
bitField0_ |= 0x00000001;
context_ = other.context_;
onChanged();
}
if (other.hasSignature()) {
setSignature(other.getSignature());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// The only required field in RPCCallerContextProto is 'context'.
public final boolean isInitialized() {
if (!hasContext()) {
return false;
}
return true;
}
// Parses a message from the stream and merges it into this builder.
// On InvalidProtocolBufferException the partially-parsed message (if any)
// is still merged in the finally block before the exception propagates,
// matching protobuf's partial-merge contract.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string context = 1;
// Holds either a java.lang.String or a ByteString; conversions between the
// two representations are performed lazily and cached (standard protobuf
// generated-code pattern for string fields).
private java.lang.Object context_ = "";
/**
* required string context = 1;
*/
public boolean hasContext() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string context = 1;
*/
public java.lang.String getContext() {
java.lang.Object ref = context_;
if (!(ref instanceof java.lang.String)) {
// Stored as ByteString (e.g. after parsing): decode once and cache
// the String form back into context_.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
context_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string context = 1;
*/
public com.google.protobuf.ByteString
getContextBytes() {
java.lang.Object ref = context_;
if (ref instanceof String) {
// Stored as String (e.g. after setContext): encode once and cache
// the ByteString form back into context_.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
context_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string context = 1;
*/
public Builder setContext(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
context_ = value;
onChanged();
return this;
}
/**
* required string context = 1;
*/
public Builder clearContext() {
bitField0_ = (bitField0_ & ~0x00000001);
context_ = getDefaultInstance().getContext();
onChanged();
return this;
}
/**
* required string context = 1;
*/
public Builder setContextBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
context_ = value;
onChanged();
return this;
}
// optional bytes signature = 2;
private com.google.protobuf.ByteString signature_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes signature = 2;
*/
public boolean hasSignature() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional bytes signature = 2;
*/
public com.google.protobuf.ByteString getSignature() {
return signature_;
}
/**
* optional bytes signature = 2;
*/
public Builder setSignature(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
signature_ = value;
onChanged();
return this;
}
/**
* optional bytes signature = 2;
*/
public Builder clearSignature() {
bitField0_ = (bitField0_ & ~0x00000002);
signature_ = getDefaultInstance().getSignature();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RPCCallerContextProto)
}
// Eagerly creates the singleton default instance; the 'true' no-init
// constructor skips normal construction and initFields() then assigns the
// per-field default values.
static {
defaultInstance = new RPCCallerContextProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.common.RPCCallerContextProto)
}
// Read-only accessor interface implemented by both RpcRequestHeaderProto
// and its Builder. One has/get pair per proto field; message-typed fields
// (traceInfo, callerContext) additionally expose an OrBuilder view.
public interface RpcRequestHeaderProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .hadoop.common.RpcKindProto rpcKind = 1;
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
*/
boolean hasRpcKind();
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind();
// optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
*/
boolean hasRpcOp();
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp();
// required sint32 callId = 3;
/**
* required sint32 callId = 3;
*
*
* a sequence number that is sent back in response
*
*/
boolean hasCallId();
/**
* required sint32 callId = 3;
*
*
* a sequence number that is sent back in response
*
*/
int getCallId();
// required bytes clientId = 4;
/**
* required bytes clientId = 4;
*
*
* Globally unique client ID
*
*/
boolean hasClientId();
/**
* required bytes clientId = 4;
*
*
* Globally unique client ID
*
*/
com.google.protobuf.ByteString getClientId();
// optional sint32 retryCount = 5 [default = -1];
/**
* optional sint32 retryCount = 5 [default = -1];
*
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*/
boolean hasRetryCount();
/**
* optional sint32 retryCount = 5 [default = -1];
*
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*/
int getRetryCount();
// optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
/**
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*
*
* tracing info
*
*/
boolean hasTraceInfo();
/**
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*
*
* tracing info
*
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo();
/**
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*
*
* tracing info
*
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder();
// optional .hadoop.common.RPCCallerContextProto callerContext = 7;
/**
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*
*
* call context
*
*/
boolean hasCallerContext();
/**
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*
*
* call context
*
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext();
/**
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*
*
* call context
*
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder();
}
/**
* Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
*
*
* the header for the RpcRequest
*
*/
public static final class RpcRequestHeaderProto extends
com.google.protobuf.GeneratedMessage
implements RpcRequestHeaderProtoOrBuilder {
// Use RpcRequestHeaderProto.newBuilder() to construct.
private RpcRequestHeaderProto(com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the singleton default instance; field
// defaults are applied separately via initFields() in the static block.
private RpcRequestHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RpcRequestHeaderProto defaultInstance;
public static RpcRequestHeaderProto getDefaultInstance() {
return defaultInstance;
}
public RpcRequestHeaderProto getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not defined in this message's
// schema; preserved so they round-trip through re-serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until EOF (tag 0); each case
// label is the precomputed tag value (fieldNumber << 3 | wireType), e.g.
// tag 8 = field 1 varint, tag 34 = field 4 length-delimited. Unrecognized
// tags — and enum values unknown to this schema — are preserved in
// unknownFields rather than dropped.
private RpcRequestHeaderProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// rpcKind: unknown enum numbers are kept as varints in unknownFields.
int rawValue = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
rpcKind_ = value;
}
break;
}
case 16: {
// rpcOp: same unknown-enum handling as rpcKind.
int rawValue = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
rpcOp_ = value;
}
break;
}
case 24: {
bitField0_ |= 0x00000004;
callId_ = input.readSInt32();
break;
}
case 34: {
bitField0_ |= 0x00000008;
clientId_ = input.readBytes();
break;
}
case 40: {
bitField0_ |= 0x00000010;
retryCount_ = input.readSInt32();
break;
}
case 50: {
// traceInfo: if the field was already seen, merge the new
// occurrence into the previous value (last-wins per-subfield),
// as required for repeated occurrences of an optional message.
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder subBuilder = null;
if (((bitField0_ & 0x00000020) == 0x00000020)) {
subBuilder = traceInfo_.toBuilder();
}
traceInfo_ = input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(traceInfo_);
traceInfo_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000020;
break;
}
case 58: {
// callerContext: same merge-on-repeat handling as traceInfo.
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder subBuilder = null;
if (((bitField0_ & 0x00000040) == 0x00000040)) {
subBuilder = callerContext_.toBuilder();
}
callerContext_ = input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(callerContext_);
callerContext_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000040;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Runs even on failure so the partial message attached to the
// exception still carries whatever was parsed.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
}
// Shared parser instance; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser PARSER =
new com.google.protobuf.AbstractParser() {
public RpcRequestHeaderProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RpcRequestHeaderProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser getParserForType() {
return PARSER;
}
/**
* Protobuf enum {@code hadoop.common.RpcRequestHeaderProto.OperationProto}
*
* Packet-framing operation carried in the request header. Each constant
* stores its descriptor index and its proto wire number separately.
*/
public enum OperationProto
implements com.google.protobuf.ProtocolMessageEnum {
/**
* RPC_FINAL_PACKET = 0;
*
*
* The final RPC Packet
*
*/
RPC_FINAL_PACKET(0, 0),
/**
* RPC_CONTINUATION_PACKET = 1;
*
*
* not implemented yet
*
*/
RPC_CONTINUATION_PACKET(1, 1),
/**
* RPC_CLOSE_CONNECTION = 2;
*
*
* close the rpc connection
*
*/
RPC_CLOSE_CONNECTION(2, 2),
;
/**
* RPC_FINAL_PACKET = 0;
*
*
* The final RPC Packet
*
*/
public static final int RPC_FINAL_PACKET_VALUE = 0;
/**
* RPC_CONTINUATION_PACKET = 1;
*
*
* not implemented yet
*
*/
public static final int RPC_CONTINUATION_PACKET_VALUE = 1;
/**
* RPC_CLOSE_CONNECTION = 2;
*
*
* close the rpc connection
*
*/
public static final int RPC_CLOSE_CONNECTION_VALUE = 2;
public final int getNumber() { return value; }
// Maps a wire number to its constant; returns null (not an exception)
// for numbers unknown to this schema version.
public static OperationProto valueOf(int value) {
switch (value) {
case 0: return RPC_FINAL_PACKET;
case 1: return RPC_CONTINUATION_PACKET;
case 2: return RPC_CLOSE_CONNECTION;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap() {
public OperationProto findValueByNumber(int number) {
return OperationProto.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDescriptor().getEnumTypes().get(0);
}
private static final OperationProto[] VALUES = values();
// Descriptor-based lookup used by reflection; unlike valueOf(int), this
// throws if the descriptor belongs to a different enum type.
public static OperationProto valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int index;
private final int value;
private OperationProto(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcRequestHeaderProto.OperationProto)
}
// Presence bits for the seven fields below: bit 0 = rpcKind, bit 1 = rpcOp,
// bit 2 = callId, bit 3 = clientId, bit 4 = retryCount, bit 5 = traceInfo,
// bit 6 = callerContext.
private int bitField0_;
// optional .hadoop.common.RpcKindProto rpcKind = 1;
public static final int RPCKIND_FIELD_NUMBER = 1;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_;
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
*/
public boolean hasRpcKind() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
return rpcKind_;
}
// optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
public static final int RPCOP_FIELD_NUMBER = 2;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_;
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
*/
public boolean hasRpcOp() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
return rpcOp_;
}
// required sint32 callId = 3;
public static final int CALLID_FIELD_NUMBER = 3;
private int callId_;
/**
* required sint32 callId = 3;
*
*
* a sequence number that is sent back in response
*
*/
public boolean hasCallId() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* required sint32 callId = 3;
*
*
* a sequence number that is sent back in response
*
*/
public int getCallId() {
return callId_;
}
// required bytes clientId = 4;
public static final int CLIENTID_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString clientId_;
/**
* required bytes clientId = 4;
*
*
* Globally unique client ID
*
*/
public boolean hasClientId() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* required bytes clientId = 4;
*
*
* Globally unique client ID
*
*/
public com.google.protobuf.ByteString getClientId() {
return clientId_;
}
// optional sint32 retryCount = 5 [default = -1];
public static final int RETRYCOUNT_FIELD_NUMBER = 5;
private int retryCount_;
/**
* optional sint32 retryCount = 5 [default = -1];
*
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*/
public boolean hasRetryCount() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* optional sint32 retryCount = 5 [default = -1];
*
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*/
public int getRetryCount() {
return retryCount_;
}
// optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
public static final int TRACEINFO_FIELD_NUMBER = 6;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_;
/**
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*
*
* tracing info
*
*/
public boolean hasTraceInfo() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*
*
* tracing info
*
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
return traceInfo_;
}
/**
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*
*
* tracing info
*
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
return traceInfo_;
}
// optional .hadoop.common.RPCCallerContextProto callerContext = 7;
public static final int CALLERCONTEXT_FIELD_NUMBER = 7;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto callerContext_;
/**
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*
*
* call context
*
*/
public boolean hasCallerContext() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*
*
* call context
*
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext() {
return callerContext_;
}
/**
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*
*
* call context
*
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder() {
return callerContext_;
}
// Assigns every field its proto-declared default value (note retryCount's
// explicit [default = -1]); called by both constructors before parsing.
private void initFields() {
rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
callId_ = 0;
clientId_ = com.google.protobuf.ByteString.EMPTY;
retryCount_ = -1;
traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// A message is initialized when its required fields (callId, clientId) are
// set and, if present, the callerContext sub-message is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasCallId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasClientId()) {
memoizedIsInitialized = 0;
return false;
}
if (hasCallerContext()) {
if (!getCallerContext().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose has-bits are set, in field-number order,
// then appends any preserved unknown fields. getSerializedSize() is called
// first for its side effect of memoizing nested-message sizes.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, rpcKind_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, rpcOp_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeSInt32(3, callId_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, clientId_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeSInt32(5, retryCount_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeMessage(6, traceInfo_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeMessage(7, callerContext_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size; -1 means not yet computed. Safe because the message
// is immutable once constructed.
private int memoizedSerializedSize = -1;
// Sums the encoded size of each present field (mirroring writeTo) plus the
// unknown-field set.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, rpcKind_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, rpcOp_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(3, callId_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, clientId_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(5, retryCount_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, traceInfo_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(7, callerContext_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to GeneratedMessage, which substitutes
// a serialized-byte proxy so the message survives Java serialization.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: two messages are equal when each field has the same
// presence and, where present, the same value, and their unknown-field
// sets match. Enum and scalar fields compare with ==; message and bytes
// fields compare with equals().
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) obj;
boolean result = true;
result = result && (hasRpcKind() == other.hasRpcKind());
if (hasRpcKind()) {
result = result &&
(getRpcKind() == other.getRpcKind());
}
result = result && (hasRpcOp() == other.hasRpcOp());
if (hasRpcOp()) {
result = result &&
(getRpcOp() == other.getRpcOp());
}
result = result && (hasCallId() == other.hasCallId());
if (hasCallId()) {
result = result && (getCallId()
== other.getCallId());
}
result = result && (hasClientId() == other.hasClientId());
if (hasClientId()) {
result = result && getClientId()
.equals(other.getClientId());
}
result = result && (hasRetryCount() == other.hasRetryCount());
if (hasRetryCount()) {
result = result && (getRetryCount()
== other.getRetryCount());
}
result = result && (hasTraceInfo() == other.hasTraceInfo());
if (hasTraceInfo()) {
result = result && getTraceInfo()
.equals(other.getTraceInfo());
}
result = result && (hasCallerContext() == other.hasCallerContext());
if (hasCallerContext()) {
result = result && getCallerContext()
.equals(other.getCallerContext());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash; 0 means not yet computed (a genuinely zero hash is simply
// recomputed each call — harmless since the message is immutable).
private int memoizedHashCode = 0;
// Mixes each present field's number and value, consistent with equals().
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRpcKind()) {
hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getRpcKind());
}
if (hasRpcOp()) {
hash = (37 * hash) + RPCOP_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getRpcOp());
}
if (hasCallId()) {
hash = (37 * hash) + CALLID_FIELD_NUMBER;
hash = (53 * hash) + getCallId();
}
if (hasClientId()) {
hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
hash = (53 * hash) + getClientId().hashCode();
}
if (hasRetryCount()) {
hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
hash = (53 * hash) + getRetryCount();
}
if (hasTraceInfo()) {
hash = (37 * hash) + TRACEINFO_FIELD_NUMBER;
hash = (53 * hash) + getTraceInfo().hashCode();
}
if (hasCallerContext()) {
hash = (37 * hash) + CALLERCONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getCallerContext().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate
// to PARSER. The delimited variants expect a varint length prefix before
// the message bytes.
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder(prototype)/toBuilder() seed the new builder
// with an existing message's field values via mergeFrom.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
*
*
* the header for the RpcRequest
*
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Constructor used when nested inside a parent builder; 'parent' is
// notified of changes via onChanged().
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the single-field builders for the two message-typed
// fields (traceInfo, callerContext) when the runtime requires it.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getTraceInfoFieldBuilder();
getCallerContextFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default (note retryCount resets to -1
// per its [default = -1] declaration) and clears all seven has-bits.
// Message-typed fields clear through their nested builder when one exists.
public Builder clear() {
super.clear();
rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
bitField0_ = (bitField0_ & ~0x00000001);
rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
bitField0_ = (bitField0_ & ~0x00000002);
callId_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
clientId_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000008);
retryCount_ = -1;
bitField0_ = (bitField0_ & ~0x00000010);
if (traceInfoBuilder_ == null) {
traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
} else {
traceInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
if (callerContextBuilder_ == null) {
callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
} else {
callerContextBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
// Deep copy via buildPartial() into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance();
}
// Builds the message, throwing if required fields (callId, clientId) are
// unset or a present callerContext is itself uninitialized.
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without checking required fields: copies each scalar/enum value
// unconditionally, transfers set has-bits into the message's bitField0_,
// and takes message-typed fields either from the cached value or from the
// corresponding nested builder when one is active.
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.rpcKind_ = rpcKind_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.rpcOp_ = rpcOp_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.callId_ = callId_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.clientId_ = clientId_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.retryCount_ = retryCount_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
if (traceInfoBuilder_ == null) {
result.traceInfo_ = traceInfo_;
} else {
result.traceInfo_ = traceInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
if (callerContextBuilder_ == null) {
result.callerContext_ = callerContext_;
} else {
result.callerContext_ = callerContextBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges {@code other} into this builder, field by field.
 *
 * Only fields that are set on {@code other} are copied; scalar/enum fields
 * overwrite this builder's value, while nested messages (traceInfo,
 * callerContext) are recursively merged. Unknown fields are merged as well.
 * Merging the default instance is a no-op.
 */
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other) {
  if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance()) return this;
  if (other.hasRpcKind()) {
    setRpcKind(other.getRpcKind());
  }
  if (other.hasRpcOp()) {
    setRpcOp(other.getRpcOp());
  }
  if (other.hasCallId()) {
    setCallId(other.getCallId());
  }
  if (other.hasClientId()) {
    setClientId(other.getClientId());
  }
  if (other.hasRetryCount()) {
    setRetryCount(other.getRetryCount());
  }
  if (other.hasTraceInfo()) {
    mergeTraceInfo(other.getTraceInfo());
  }
  if (other.hasCallerContext()) {
    mergeCallerContext(other.getCallerContext());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Returns true iff all required fields are set: callId and clientId are
 * required, and callerContext — if present — must itself be initialized.
 */
public final boolean isInitialized() {
  if (!hasCallId()) {
    return false;
  }
  if (!hasClientId()) {
    return false;
  }
  if (hasCallerContext()) {
    if (!getCallerContext().isInitialized()) {
      return false;
    }
  }
  return true;
}
/**
 * Parses a message from the wire and merges it into this builder.
 *
 * On an InvalidProtocolBufferException the exception is rethrown, but the
 * partially parsed message (if any) is still merged in the finally block, so
 * fields read before the failure are not lost.
 *
 * @throws java.io.IOException on read or parse failure
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// One bit per field, tracking which fields have been explicitly set on this
// builder (bit 0 = rpcKind, 1 = rpcOp, 2 = callId, 3 = clientId,
// 4 = retryCount, 5 = traceInfo, 6 = callerContext).
private int bitField0_;

// optional .hadoop.common.RpcKindProto rpcKind = 1;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
/** Returns true iff rpcKind has been explicitly set. */
public boolean hasRpcKind() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/** {@code optional .hadoop.common.RpcKindProto rpcKind = 1;} */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
  return rpcKind_;
}
/**
 * Sets rpcKind and marks it present.
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setRpcKind(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
  rpcKind_ = value;
  onChanged();
  return this;
}
/** Clears rpcKind back to its default (RPC_BUILTIN) and marks it absent. */
public Builder clearRpcKind() {
  bitField0_ = (bitField0_ & ~0x00000001);
  rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
  onChanged();
  return this;
}
// optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
/** Returns true iff rpcOp has been explicitly set. */
public boolean hasRpcOp() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/** {@code optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;} */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
  return rpcOp_;
}
/**
 * Sets rpcOp and marks it present.
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setRpcOp(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  rpcOp_ = value;
  onChanged();
  return this;
}
/** Clears rpcOp back to its default (RPC_FINAL_PACKET) and marks it absent. */
public Builder clearRpcOp() {
  bitField0_ = (bitField0_ & ~0x00000002);
  rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
  onChanged();
  return this;
}
// required sint32 callId = 3;
// A sequence number that is echoed back in the response.
private int callId_ ;
/** Returns true iff callId has been explicitly set (it is a required field). */
public boolean hasCallId() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/** {@code required sint32 callId = 3;} — sequence number sent back in the response. */
public int getCallId() {
  return callId_;
}
/** Sets callId and marks it present. */
public Builder setCallId(int value) {
  bitField0_ |= 0x00000004;
  callId_ = value;
  onChanged();
  return this;
}
/** Clears callId back to 0 and marks it absent. */
public Builder clearCallId() {
  bitField0_ = (bitField0_ & ~0x00000004);
  callId_ = 0;
  onChanged();
  return this;
}
// required bytes clientId = 4;
// Globally unique client ID.
private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
/** Returns true iff clientId has been explicitly set (it is a required field). */
public boolean hasClientId() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/** {@code required bytes clientId = 4;} — globally unique client ID. */
public com.google.protobuf.ByteString getClientId() {
  return clientId_;
}
/**
 * Sets clientId and marks it present.
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setClientId(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
  clientId_ = value;
  onChanged();
  return this;
}
/** Clears clientId back to the message default and marks it absent. */
public Builder clearClientId() {
  bitField0_ = (bitField0_ & ~0x00000008);
  clientId_ = getDefaultInstance().getClientId();
  onChanged();
  return this;
}
// optional sint32 retryCount = 5 [default = -1];
// clientId + callId uniquely identify a request; retryCount of 1 means this
// is the first retry. Default -1 distinguishes "not a retry".
private int retryCount_ = -1;
/** Returns true iff retryCount has been explicitly set. */
public boolean hasRetryCount() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/** {@code optional sint32 retryCount = 5 [default = -1];} — 1 means first retry. */
public int getRetryCount() {
  return retryCount_;
}
/** Sets retryCount and marks it present. */
public Builder setRetryCount(int value) {
  bitField0_ |= 0x00000010;
  retryCount_ = value;
  onChanged();
  return this;
}
/** Clears retryCount back to its default (-1) and marks it absent. */
public Builder clearRetryCount() {
  bitField0_ = (bitField0_ & ~0x00000010);
  retryCount_ = -1;
  onChanged();
  return this;
}
// optional .hadoop.common.RPCTraceInfoProto traceInfo = 6; (tracing info)
//
// Nested-message field. Exactly one of the plain field (traceInfo_) or the
// lazily created sub-builder (traceInfoBuilder_) is authoritative: once
// getTraceInfoFieldBuilder() runs, traceInfo_ is nulled and all access goes
// through the sub-builder.
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder> traceInfoBuilder_;
/** Returns true iff traceInfo has been explicitly set. */
public boolean hasTraceInfo() {
  return ((bitField0_ & 0x00000020) == 0x00000020);
}
/** {@code optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;} — tracing info. */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
  if (traceInfoBuilder_ == null) {
    return traceInfo_;
  } else {
    return traceInfoBuilder_.getMessage();
  }
}
/**
 * Sets traceInfo to the given message and marks it present.
 *
 * @throws NullPointerException if {@code value} is null (and no sub-builder exists)
 */
public Builder setTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
  if (traceInfoBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    traceInfo_ = value;
    onChanged();
  } else {
    traceInfoBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000020;
  return this;
}
/** Sets traceInfo from a sub-message builder and marks it present. */
public Builder setTraceInfo(
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder builderForValue) {
  if (traceInfoBuilder_ == null) {
    traceInfo_ = builderForValue.build();
    onChanged();
  } else {
    traceInfoBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000020;
  return this;
}
/**
 * Merges {@code value} into traceInfo. If traceInfo is already present and
 * non-default, the two messages are merged field-wise; otherwise {@code value}
 * simply replaces the current content.
 */
public Builder mergeTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
  if (traceInfoBuilder_ == null) {
    if (((bitField0_ & 0x00000020) == 0x00000020) &&
        traceInfo_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) {
      traceInfo_ =
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder(traceInfo_).mergeFrom(value).buildPartial();
    } else {
      traceInfo_ = value;
    }
    onChanged();
  } else {
    traceInfoBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000020;
  return this;
}
/** Clears traceInfo back to the default instance and marks it absent. */
public Builder clearTraceInfo() {
  if (traceInfoBuilder_ == null) {
    traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
    onChanged();
  } else {
    traceInfoBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000020);
  return this;
}
/**
 * Returns a mutable builder for the nested traceInfo message, creating the
 * lazy sub-builder on first use, and marks the field present.
 */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder getTraceInfoBuilder() {
  bitField0_ |= 0x00000020;
  onChanged();
  return getTraceInfoFieldBuilder().getBuilder();
}
/** Read-only view of traceInfo without forcing sub-builder creation. */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
  if (traceInfoBuilder_ != null) {
    return traceInfoBuilder_.getMessageOrBuilder();
  } else {
    return traceInfo_;
  }
}
/**
 * Lazily creates the SingleFieldBuilder for traceInfo. After creation,
 * traceInfo_ is nulled so the sub-builder is the single source of truth.
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder>
    getTraceInfoFieldBuilder() {
  if (traceInfoBuilder_ == null) {
    traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder>(
            traceInfo_,
            getParentForChildren(),
            isClean());
    traceInfo_ = null;
  }
  return traceInfoBuilder_;
}
// optional .hadoop.common.RPCCallerContextProto callerContext = 7; (call context)
//
// Nested-message field, same lazy sub-builder pattern as traceInfo: once
// getCallerContextFieldBuilder() runs, callerContext_ is nulled and all
// access goes through callerContextBuilder_.
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder> callerContextBuilder_;
/** Returns true iff callerContext has been explicitly set. */
public boolean hasCallerContext() {
  return ((bitField0_ & 0x00000040) == 0x00000040);
}
/** {@code optional .hadoop.common.RPCCallerContextProto callerContext = 7;} — call context. */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext() {
  if (callerContextBuilder_ == null) {
    return callerContext_;
  } else {
    return callerContextBuilder_.getMessage();
  }
}
/**
 * Sets callerContext to the given message and marks it present.
 *
 * @throws NullPointerException if {@code value} is null (and no sub-builder exists)
 */
public Builder setCallerContext(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto value) {
  if (callerContextBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    callerContext_ = value;
    onChanged();
  } else {
    callerContextBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000040;
  return this;
}
/** Sets callerContext from a sub-message builder and marks it present. */
public Builder setCallerContext(
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder builderForValue) {
  if (callerContextBuilder_ == null) {
    callerContext_ = builderForValue.build();
    onChanged();
  } else {
    callerContextBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000040;
  return this;
}
/**
 * Merges {@code value} into callerContext. If callerContext is already
 * present and non-default, the two messages are merged field-wise; otherwise
 * {@code value} simply replaces the current content.
 */
public Builder mergeCallerContext(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto value) {
  if (callerContextBuilder_ == null) {
    if (((bitField0_ & 0x00000040) == 0x00000040) &&
        callerContext_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance()) {
      callerContext_ =
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.newBuilder(callerContext_).mergeFrom(value).buildPartial();
    } else {
      callerContext_ = value;
    }
    onChanged();
  } else {
    callerContextBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000040;
  return this;
}
/** Clears callerContext back to the default instance and marks it absent. */
public Builder clearCallerContext() {
  if (callerContextBuilder_ == null) {
    callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
    onChanged();
  } else {
    callerContextBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000040);
  return this;
}
/**
 * Returns a mutable builder for the nested callerContext message, creating
 * the lazy sub-builder on first use, and marks the field present.
 */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder getCallerContextBuilder() {
  bitField0_ |= 0x00000040;
  onChanged();
  return getCallerContextFieldBuilder().getBuilder();
}
/** Read-only view of callerContext without forcing sub-builder creation. */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder() {
  if (callerContextBuilder_ != null) {
    return callerContextBuilder_.getMessageOrBuilder();
  } else {
    return callerContext_;
  }
}
/**
 * Lazily creates the SingleFieldBuilder for callerContext. After creation,
 * callerContext_ is nulled so the sub-builder is the single source of truth.
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder>
    getCallerContextFieldBuilder() {
  if (callerContextBuilder_ == null) {
    callerContextBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder>(
            callerContext_,
            getParentForChildren(),
            isClean());
    callerContext_ = null;
  }
  return callerContextBuilder_;
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcRequestHeaderProto)
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcRequestHeaderProto)
}
static {
  // Eagerly create and initialize the singleton default instance for
  // RpcRequestHeaderProto (the boolean ctor skips builder-based setup).
  defaultInstance = new RpcRequestHeaderProto(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcRequestHeaderProto)
}
/**
 * Read-only accessor interface shared by {@code RpcResponseHeaderProto} and
 * its Builder (generated protobuf "OrBuilder" pattern).
 */
public interface RpcResponseHeaderProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  /** {@code required uint32 callId = 1;} — the callId used in the request. */
  boolean hasCallId();
  /** {@code required uint32 callId = 1;} — the callId used in the request. */
  int getCallId();

  /** {@code required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;} */
  boolean hasStatus();
  /** {@code required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;} */
  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus();

  /** {@code optional uint32 serverIpcVersionNum = 3;} — sent on success or failure. */
  boolean hasServerIpcVersionNum();
  /** {@code optional uint32 serverIpcVersionNum = 3;} — sent on success or failure. */
  int getServerIpcVersionNum();

  /** {@code optional string exceptionClassName = 4;} — set if the request fails. */
  boolean hasExceptionClassName();
  /** {@code optional string exceptionClassName = 4;} — set if the request fails. */
  java.lang.String getExceptionClassName();
  /** UTF-8 bytes of exceptionClassName. */
  com.google.protobuf.ByteString
      getExceptionClassNameBytes();

  /** {@code optional string errorMsg = 5;} — on failure, often contains a stack trace. */
  boolean hasErrorMsg();
  /** {@code optional string errorMsg = 5;} — on failure, often contains a stack trace. */
  java.lang.String getErrorMsg();
  /** UTF-8 bytes of errorMsg. */
  com.google.protobuf.ByteString
      getErrorMsgBytes();

  /** {@code optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;} — in case of error. */
  boolean hasErrorDetail();
  /** {@code optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;} — in case of error. */
  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail();

  /** {@code optional bytes clientId = 7;} — globally unique client ID. */
  boolean hasClientId();
  /** {@code optional bytes clientId = 7;} — globally unique client ID. */
  com.google.protobuf.ByteString getClientId();

  /** {@code optional sint32 retryCount = 8 [default = -1];} */
  boolean hasRetryCount();
  /** {@code optional sint32 retryCount = 8 [default = -1];} */
  int getRetryCount();
}
/**
* Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
*
*
**
* Rpc Response Header
* +------------------------------------------------------------------+
* | Rpc total response length in bytes (4 bytes int) |
* | (sum of next two parts) |
* +------------------------------------------------------------------+
* | RpcResponseHeaderProto - serialized delimited ie has len |
* +------------------------------------------------------------------+
* | if request is successful: |
* | - RpcResponse - The actual rpc response bytes follow |
* | the response header |
* | This response is serialized based on RpcKindProto |
* | if request fails : |
* | The rpc response header contains the necessary info |
* +------------------------------------------------------------------+
*
* Note that rpc response header is also used when connection setup fails.
* Ie the response looks like a rpc response with a fake callId.
*
*/
public static final class RpcResponseHeaderProto extends
com.google.protobuf.GeneratedMessage
implements RpcResponseHeaderProtoOrBuilder {
// Use RpcResponseHeaderProto.newBuilder() to construct.
//
// NOTE: the generic wildcard on the Builder parameter was lost in an earlier
// HTML extraction of this generated file ("Builder>"); restored here to the
// standard protobuf 2.5 generated signature so the file compiles.
private RpcResponseHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  // Capture the builder's unknown fields so round-tripping preserves them.
  this.unknownFields = builder.getUnknownFields();
}
// Internal ctor used only to create the singleton default instance.
private RpcResponseHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance; presumably assigned in this class's static
// initializer (outside this view), mirroring RpcRequestHeaderProto's pattern.
private static final RpcResponseHeaderProto defaultInstance;
/** Returns the immutable singleton default instance. */
public static RpcResponseHeaderProto getDefaultInstance() {
  return defaultInstance;
}
/** Returns the default instance for this concrete message type. */
public RpcResponseHeaderProto getDefaultInstanceForType() {
  return defaultInstance;
}
// Fields present on the wire that this (possibly older) schema does not know;
// preserved so re-serialization does not drop them.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs until end of stream
 * (tag 0) or an unparseable unknown field. Unrecognized fields and unknown
 * enum values are preserved in {@code unknownFields}. On failure, the
 * partially parsed message is attached to the thrown
 * InvalidProtocolBufferException.
 */
private RpcResponseHeaderProto(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // Tag = (field number << 3) | wire type. The position of the default
      // label inside the switch is irrelevant in Java; it only fires for
      // tags not matched by any case below.
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {
          // callId = 1, varint.
          bitField0_ |= 0x00000001;
          callId_ = input.readUInt32();
          break;
        }
        case 16: {
          // status = 2, enum; unknown values go to unknownFields.
          int rawValue = input.readEnum();
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.valueOf(rawValue);
          if (value == null) {
            unknownFields.mergeVarintField(2, rawValue);
          } else {
            bitField0_ |= 0x00000002;
            status_ = value;
          }
          break;
        }
        case 24: {
          // serverIpcVersionNum = 3, varint.
          bitField0_ |= 0x00000004;
          serverIpcVersionNum_ = input.readUInt32();
          break;
        }
        case 34: {
          // exceptionClassName = 4, length-delimited; stored as raw bytes,
          // decoded lazily in getExceptionClassName().
          bitField0_ |= 0x00000008;
          exceptionClassName_ = input.readBytes();
          break;
        }
        case 42: {
          // errorMsg = 5, length-delimited; lazily decoded like above.
          bitField0_ |= 0x00000010;
          errorMsg_ = input.readBytes();
          break;
        }
        case 48: {
          // errorDetail = 6, enum; unknown values go to unknownFields.
          int rawValue = input.readEnum();
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.valueOf(rawValue);
          if (value == null) {
            unknownFields.mergeVarintField(6, rawValue);
          } else {
            bitField0_ |= 0x00000020;
            errorDetail_ = value;
          }
          break;
        }
        case 58: {
          // clientId = 7, length-delimited bytes.
          bitField0_ |= 0x00000040;
          clientId_ = input.readBytes();
          break;
        }
        case 64: {
          // retryCount = 8, zigzag varint.
          bitField0_ |= 0x00000080;
          retryCount_ = input.readSInt32();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Always freeze whatever was parsed, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
/** Returns the protobuf descriptor for hadoop.common.RpcResponseHeaderProto. */
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
}
/** Maps descriptor fields to this class's accessors for reflective access. */
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
}
// Stateless singleton parser delegating to the wire-format constructor.
//
// NOTE: the generic type arguments were stripped from this generated file by
// an earlier HTML extraction (raw Parser/AbstractParser); restored here to
// the standard protobuf 2.5 generated form to avoid raw-type use.
public static com.google.protobuf.Parser<RpcResponseHeaderProto> PARSER =
    new com.google.protobuf.AbstractParser<RpcResponseHeaderProto>() {
  public RpcResponseHeaderProto parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new RpcResponseHeaderProto(input, extensionRegistry);
  }
};
@java.lang.Override
public com.google.protobuf.Parser<RpcResponseHeaderProto> getParserForType() {
  return PARSER;
}
/**
 * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcStatusProto}.
 *
 * Overall status of an RPC response. Each constant carries its descriptor
 * index and its proto field number (they coincide here).
 *
 * NOTE: the generic type arguments on Internal.EnumLiteMap were stripped from
 * this generated file by an earlier HTML extraction; restored here to the
 * standard protobuf 2.5 generated form.
 */
public enum RpcStatusProto
    implements com.google.protobuf.ProtocolMessageEnum {
  /** {@code SUCCESS = 0;} — RPC succeeded. */
  SUCCESS(0, 0),
  /** {@code ERROR = 1;} — RPC error; connection left open for future calls. */
  ERROR(1, 1),
  /** {@code FATAL = 2;} — fatal error; connection closed. */
  FATAL(2, 2),
  ;

  /** {@code SUCCESS = 0;} — RPC succeeded. */
  public static final int SUCCESS_VALUE = 0;
  /** {@code ERROR = 1;} — RPC error; connection left open for future calls. */
  public static final int ERROR_VALUE = 1;
  /** {@code FATAL = 2;} — fatal error; connection closed. */
  public static final int FATAL_VALUE = 2;

  /** Returns the proto field number of this constant. */
  public final int getNumber() { return value; }

  /** Returns the constant for a proto number, or null if unrecognized. */
  public static RpcStatusProto valueOf(int value) {
    switch (value) {
      case 0: return SUCCESS;
      case 1: return ERROR;
      case 2: return FATAL;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
      internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>() {
          public RpcStatusProto findValueByNumber(int number) {
            return RpcStatusProto.valueOf(number);
          }
        };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(index);
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    // First enum type declared in RpcResponseHeaderProto.
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(0);
  }

  private static final RpcStatusProto[] VALUES = values();

  /** Resolves a descriptor to its enum constant. */
  public static RpcStatusProto valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  private final int index;   // position in the descriptor's value list
  private final int value;   // proto field number

  private RpcStatusProto(int index, int value) {
    this.index = index;
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcStatusProto)
}
/**
 * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto}.
 *
 * Detailed error code accompanying an ERROR/FATAL status. ERROR_* codes leave
 * the connection open; FATAL_* codes close it. Proto numbers are sparse
 * (1-6, 10-15), so descriptor index and proto number differ.
 *
 * NOTE: the generic type arguments on Internal.EnumLiteMap were stripped from
 * this generated file by an earlier HTML extraction; restored here to the
 * standard protobuf 2.5 generated form.
 */
public enum RpcErrorCodeProto
    implements com.google.protobuf.ProtocolMessageEnum {
  /** {@code ERROR_APPLICATION = 1;} — non-fatal RPC error; connection stays open. */
  ERROR_APPLICATION(0, 1),
  /** {@code ERROR_NO_SUCH_METHOD = 2;} — no such method. */
  ERROR_NO_SUCH_METHOD(1, 2),
  /** {@code ERROR_NO_SUCH_PROTOCOL = 3;} — no such protocol. */
  ERROR_NO_SUCH_PROTOCOL(2, 3),
  /** {@code ERROR_RPC_SERVER = 4;} — RPC error on the server side. */
  ERROR_RPC_SERVER(3, 4),
  /** {@code ERROR_SERIALIZING_RESPONSE = 5;} — error serializing the response. */
  ERROR_SERIALIZING_RESPONSE(4, 5),
  /** {@code ERROR_RPC_VERSION_MISMATCH = 6;} — RPC protocol version mismatch. */
  ERROR_RPC_VERSION_MISMATCH(5, 6),
  /** {@code FATAL_UNKNOWN = 10;} — fatal server-side error; connection closed. */
  FATAL_UNKNOWN(6, 10),
  /** {@code FATAL_UNSUPPORTED_SERIALIZATION = 11;} — IPC-layer serialization type invalid. */
  FATAL_UNSUPPORTED_SERIALIZATION(7, 11),
  /** {@code FATAL_INVALID_RPC_HEADER = 12;} — fields of RpcHeader are invalid. */
  FATAL_INVALID_RPC_HEADER(8, 12),
  /** {@code FATAL_DESERIALIZING_REQUEST = 13;} — could not deserialize the RPC request. */
  FATAL_DESERIALIZING_REQUEST(9, 13),
  /** {@code FATAL_VERSION_MISMATCH = 14;} — IPC-layer version mismatch. */
  FATAL_VERSION_MISMATCH(10, 14),
  /** {@code FATAL_UNAUTHORIZED = 15;} — authentication failed. */
  FATAL_UNAUTHORIZED(11, 15),
  ;

  /** {@code ERROR_APPLICATION = 1;} */
  public static final int ERROR_APPLICATION_VALUE = 1;
  /** {@code ERROR_NO_SUCH_METHOD = 2;} */
  public static final int ERROR_NO_SUCH_METHOD_VALUE = 2;
  /** {@code ERROR_NO_SUCH_PROTOCOL = 3;} */
  public static final int ERROR_NO_SUCH_PROTOCOL_VALUE = 3;
  /** {@code ERROR_RPC_SERVER = 4;} */
  public static final int ERROR_RPC_SERVER_VALUE = 4;
  /** {@code ERROR_SERIALIZING_RESPONSE = 5;} */
  public static final int ERROR_SERIALIZING_RESPONSE_VALUE = 5;
  /** {@code ERROR_RPC_VERSION_MISMATCH = 6;} */
  public static final int ERROR_RPC_VERSION_MISMATCH_VALUE = 6;
  /** {@code FATAL_UNKNOWN = 10;} */
  public static final int FATAL_UNKNOWN_VALUE = 10;
  /** {@code FATAL_UNSUPPORTED_SERIALIZATION = 11;} */
  public static final int FATAL_UNSUPPORTED_SERIALIZATION_VALUE = 11;
  /** {@code FATAL_INVALID_RPC_HEADER = 12;} */
  public static final int FATAL_INVALID_RPC_HEADER_VALUE = 12;
  /** {@code FATAL_DESERIALIZING_REQUEST = 13;} */
  public static final int FATAL_DESERIALIZING_REQUEST_VALUE = 13;
  /** {@code FATAL_VERSION_MISMATCH = 14;} */
  public static final int FATAL_VERSION_MISMATCH_VALUE = 14;
  /** {@code FATAL_UNAUTHORIZED = 15;} */
  public static final int FATAL_UNAUTHORIZED_VALUE = 15;

  /** Returns the proto field number of this constant. */
  public final int getNumber() { return value; }

  /** Returns the constant for a proto number, or null if unrecognized. */
  public static RpcErrorCodeProto valueOf(int value) {
    switch (value) {
      case 1: return ERROR_APPLICATION;
      case 2: return ERROR_NO_SUCH_METHOD;
      case 3: return ERROR_NO_SUCH_PROTOCOL;
      case 4: return ERROR_RPC_SERVER;
      case 5: return ERROR_SERIALIZING_RESPONSE;
      case 6: return ERROR_RPC_VERSION_MISMATCH;
      case 10: return FATAL_UNKNOWN;
      case 11: return FATAL_UNSUPPORTED_SERIALIZATION;
      case 12: return FATAL_INVALID_RPC_HEADER;
      case 13: return FATAL_DESERIALIZING_REQUEST;
      case 14: return FATAL_VERSION_MISMATCH;
      case 15: return FATAL_UNAUTHORIZED;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
      internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>() {
          public RpcErrorCodeProto findValueByNumber(int number) {
            return RpcErrorCodeProto.valueOf(number);
          }
        };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(index);
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    // Second enum type declared in RpcResponseHeaderProto.
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(1);
  }

  private static final RpcErrorCodeProto[] VALUES = values();

  /** Resolves a descriptor to its enum constant. */
  public static RpcErrorCodeProto valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  private final int index;   // position in the descriptor's value list
  private final int value;   // proto field number

  private RpcErrorCodeProto(int index, int value) {
    this.index = index;
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto)
}
// Has-bits for this message's fields (bit 0 = callId, 1 = status,
// 2 = serverIpcVersionNum, 3 = exceptionClassName, 4 = errorMsg,
// 5 = errorDetail, 6 = clientId, 7 = retryCount).
private int bitField0_;

// required uint32 callId = 1;
public static final int CALLID_FIELD_NUMBER = 1;
private int callId_;
/** Returns true iff callId was present on the wire (required field). */
public boolean hasCallId() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/** {@code required uint32 callId = 1;} — the callId used in the request. */
public int getCallId() {
  return callId_;
}
// required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
public static final int STATUS_FIELD_NUMBER = 2;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_;
/** Returns true iff status was present on the wire (required field). */
public boolean hasStatus() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/** {@code required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;} */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
  return status_;
}
// optional uint32 serverIpcVersionNum = 3;
// Sent whether the call succeeded or failed.
public static final int SERVERIPCVERSIONNUM_FIELD_NUMBER = 3;
private int serverIpcVersionNum_;
/** Returns true iff serverIpcVersionNum was present on the wire. */
public boolean hasServerIpcVersionNum() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/** {@code optional uint32 serverIpcVersionNum = 3;} — sent on success or failure. */
public int getServerIpcVersionNum() {
  return serverIpcVersionNum_;
}
// optional string exceptionClassName = 4;
// Set when the request fails. Stored as either a String or a ByteString:
// parsed from the wire as raw bytes and lazily decoded to UTF-8 on first
// String access (and the decoded form cached when valid UTF-8).
public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 4;
private java.lang.Object exceptionClassName_;
/** Returns true iff exceptionClassName was present on the wire. */
public boolean hasExceptionClassName() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * {@code optional string exceptionClassName = 4;} — set if the request fails.
 *
 * Lazily decodes the cached ByteString to a String; the decoded form is
 * cached only when the bytes are valid UTF-8.
 */
public java.lang.String getExceptionClassName() {
  java.lang.Object ref = exceptionClassName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      exceptionClassName_ = s;
    }
    return s;
  }
}
/**
 * UTF-8 bytes of exceptionClassName; encodes and caches the ByteString form
 * when the field currently holds a String.
 */
public com.google.protobuf.ByteString
    getExceptionClassNameBytes() {
  java.lang.Object ref = exceptionClassName_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    exceptionClassName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// optional string errorMsg = 5;
public static final int ERRORMSG_FIELD_NUMBER = 5;
// Holds either a String or a ByteString; decoded lazily and cached when valid UTF-8.
private java.lang.Object errorMsg_;
/**
 * {@code optional string errorMsg = 5;} — set if the request fails; often contains a stack trace.
 *
 * @return true if the field has been set
 */
public boolean hasErrorMsg() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * {@code optional string errorMsg = 5;} — set if the request fails; often contains a stack trace.
 * Decodes the underlying ByteString on first call and caches the String
 * only when the bytes are valid UTF-8.
 */
public java.lang.String getErrorMsg() {
java.lang.Object ref = errorMsg_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
errorMsg_ = s;
}
return s;
}
}
/**
 * Returns the field as UTF-8 bytes, caching the encoded form if the field
 * currently holds a String.
 */
public com.google.protobuf.ByteString
getErrorMsgBytes() {
java.lang.Object ref = errorMsg_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
errorMsg_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
public static final int ERRORDETAIL_FIELD_NUMBER = 6;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_;
/**
 * {@code optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;}
 * — set in case of error.
 *
 * @return true if the field has been set
 */
public boolean hasErrorDetail() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * {@code optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;}
 * — set in case of error.
 */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
return errorDetail_;
}
// optional bytes clientId = 7;
public static final int CLIENTID_FIELD_NUMBER = 7;
private com.google.protobuf.ByteString clientId_;
/**
 * {@code optional bytes clientId = 7;} — globally unique client ID.
 *
 * @return true if the field has been set
 */
public boolean hasClientId() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * {@code optional bytes clientId = 7;} — globally unique client ID.
 */
public com.google.protobuf.ByteString getClientId() {
return clientId_;
}
// optional sint32 retryCount = 8 [default = -1];
public static final int RETRYCOUNT_FIELD_NUMBER = 8;
private int retryCount_;
/**
 * {@code optional sint32 retryCount = 8 [default = -1];}
 *
 * @return true if the field has been set
 */
public boolean hasRetryCount() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * {@code optional sint32 retryCount = 8 [default = -1];}
 */
public int getRetryCount() {
return retryCount_;
}
// Resets every field to its proto-declared default value (note retryCount defaults to -1).
private void initFields() {
callId_ = 0;
status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
serverIpcVersionNum_ = 0;
exceptionClassName_ = "";
errorMsg_ = "";
errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
clientId_ = com.google.protobuf.ByteString.EMPTY;
retryCount_ = -1;
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/**
 * A message is initialized when both required fields (callId, status) are set;
 * the result is cached after the first computation.
 */
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasCallId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasStatus()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes all set fields to {@code output} in field-number order, followed by
 * any unknown fields. getSerializedSize() is invoked first so memoized sizes are
 * computed before writing.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt32(1, callId_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, status_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt32(3, serverIpcVersionNum_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getExceptionClassNameBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBytes(5, getErrorMsgBytes());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeEnum(6, errorDetail_.getNumber());
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeBytes(7, clientId_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeSInt32(8, retryCount_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
/**
 * Computes (and caches) the serialized size in bytes: the sum of each set
 * field's encoded size plus the unknown-field set's size.
 */
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(1, callId_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, status_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(3, serverIpcVersionNum_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getExceptionClassNameBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getErrorMsgBytes());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(6, errorDetail_.getNumber());
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(7, clientId_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(8, retryCount_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
/**
 * Field-by-field equality: two messages are equal when each field's presence
 * bit and value match and their unknown-field sets are equal. Enum fields use
 * reference equality (protobuf enum constants are singletons).
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) obj;
boolean result = true;
result = result && (hasCallId() == other.hasCallId());
if (hasCallId()) {
result = result && (getCallId()
== other.getCallId());
}
result = result && (hasStatus() == other.hasStatus());
if (hasStatus()) {
result = result &&
(getStatus() == other.getStatus());
}
result = result && (hasServerIpcVersionNum() == other.hasServerIpcVersionNum());
if (hasServerIpcVersionNum()) {
result = result && (getServerIpcVersionNum()
== other.getServerIpcVersionNum());
}
result = result && (hasExceptionClassName() == other.hasExceptionClassName());
if (hasExceptionClassName()) {
result = result && getExceptionClassName()
.equals(other.getExceptionClassName());
}
result = result && (hasErrorMsg() == other.hasErrorMsg());
if (hasErrorMsg()) {
result = result && getErrorMsg()
.equals(other.getErrorMsg());
}
result = result && (hasErrorDetail() == other.hasErrorDetail());
if (hasErrorDetail()) {
result = result &&
(getErrorDetail() == other.getErrorDetail());
}
result = result && (hasClientId() == other.hasClientId());
if (hasClientId()) {
result = result && getClientId()
.equals(other.getClientId());
}
result = result && (hasRetryCount() == other.hasRetryCount());
if (hasRetryCount()) {
result = result && (getRetryCount()
== other.getRetryCount());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means not yet computed (recomputed harmlessly if the true hash is 0).
private int memoizedHashCode = 0;
/**
 * Hash over the descriptor, each set field (mixing in its field number), and
 * the unknown-field set; consistent with {@link #equals(Object)}. Result is memoized.
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasCallId()) {
hash = (37 * hash) + CALLID_FIELD_NUMBER;
hash = (53 * hash) + getCallId();
}
if (hasStatus()) {
hash = (37 * hash) + STATUS_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getStatus());
}
if (hasServerIpcVersionNum()) {
hash = (37 * hash) + SERVERIPCVERSIONNUM_FIELD_NUMBER;
hash = (53 * hash) + getServerIpcVersionNum();
}
if (hasExceptionClassName()) {
hash = (37 * hash) + EXCEPTIONCLASSNAME_FIELD_NUMBER;
hash = (53 * hash) + getExceptionClassName().hashCode();
}
if (hasErrorMsg()) {
hash = (37 * hash) + ERRORMSG_FIELD_NUMBER;
hash = (53 * hash) + getErrorMsg().hashCode();
}
if (hasErrorDetail()) {
hash = (37 * hash) + ERRORDETAIL_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getErrorDetail());
}
if (hasClientId()) {
hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
hash = (53 * hash) + getClientId().hashCode();
}
if (hasRetryCount()) {
hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
hash = (53 * hash) + getRetryCount();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for the supported input kinds (ByteString, byte[],
// InputStream, delimited InputStream, CodedInputStream), each with and without
// an extension registry. All delegate to PARSER.
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory entry points: fresh builder, builder seeded from a prototype,
// builder over this instance, and the parent-aware variant used by nested builders.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
 *
 * <pre>
 * Rpc Response Header
 *   +------------------------------------------------------------------+
 *   | Rpc total response length in bytes (4 bytes int)                 |
 *   |  (sum of next two parts)                                         |
 *   +------------------------------------------------------------------+
 *   | RpcResponseHeaderProto - serialized delimited ie has len         |
 *   +------------------------------------------------------------------+
 *   | if request is successful:                                        |
 *   |   - RpcResponse - The actual rpc response bytes follow           |
 *   |     the response header                                          |
 *   |     This response is serialized based on RpcKindProto            |
 *   | if request fails :                                               |
 *   |   The rpc response header contains the necessary info            |
 *   +------------------------------------------------------------------+
 *
 * Note that rpc response header is also used when connection setup fails.
 * Ie the response looks like a rpc response with a fake callId.
 * </pre>
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
    implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProtoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
  }
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
  }
  // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // No message-typed sub-builders to force-initialize for this message.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    }
  }
  private static Builder create() {
    return new Builder();
  }
  /** Resets every field to its proto default and clears all presence bits. */
  public Builder clear() {
    super.clear();
    callId_ = 0;
    bitField0_ = (bitField0_ & ~0x00000001);
    status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
    bitField0_ = (bitField0_ & ~0x00000002);
    serverIpcVersionNum_ = 0;
    bitField0_ = (bitField0_ & ~0x00000004);
    exceptionClassName_ = "";
    bitField0_ = (bitField0_ & ~0x00000008);
    errorMsg_ = "";
    bitField0_ = (bitField0_ & ~0x00000010);
    errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
    bitField0_ = (bitField0_ & ~0x00000020);
    clientId_ = com.google.protobuf.ByteString.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000040);
    retryCount_ = -1;
    bitField0_ = (bitField0_ & ~0x00000080);
    return this;
  }
  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
  }
  public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
    return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance();
  }
  /**
   * Builds the message, failing if required fields (callId, status) are unset.
   *
   * @throws com.google.protobuf.UninitializedMessageException (via newUninitializedMessageException)
   */
  public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto build() {
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  /** Builds without checking required fields; copies values and presence bits. */
  public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto buildPartial() {
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    result.callId_ = callId_;
    if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
      to_bitField0_ |= 0x00000002;
    }
    result.status_ = status_;
    if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
      to_bitField0_ |= 0x00000004;
    }
    result.serverIpcVersionNum_ = serverIpcVersionNum_;
    if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
      to_bitField0_ |= 0x00000008;
    }
    result.exceptionClassName_ = exceptionClassName_;
    if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
      to_bitField0_ |= 0x00000010;
    }
    result.errorMsg_ = errorMsg_;
    if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
      to_bitField0_ |= 0x00000020;
    }
    result.errorDetail_ = errorDetail_;
    if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
      to_bitField0_ |= 0x00000040;
    }
    result.clientId_ = clientId_;
    if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
      to_bitField0_ |= 0x00000080;
    }
    result.retryCount_ = retryCount_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) {
      return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  /** Copies every set field of {@code other} into this builder; unset fields are ignored. */
  public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other) {
    if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance()) return this;
    if (other.hasCallId()) {
      setCallId(other.getCallId());
    }
    if (other.hasStatus()) {
      setStatus(other.getStatus());
    }
    if (other.hasServerIpcVersionNum()) {
      setServerIpcVersionNum(other.getServerIpcVersionNum());
    }
    if (other.hasExceptionClassName()) {
      // Copy the raw String/ByteString holder directly to avoid forcing UTF-8 decode.
      bitField0_ |= 0x00000008;
      exceptionClassName_ = other.exceptionClassName_;
      onChanged();
    }
    if (other.hasErrorMsg()) {
      bitField0_ |= 0x00000010;
      errorMsg_ = other.errorMsg_;
      onChanged();
    }
    if (other.hasErrorDetail()) {
      setErrorDetail(other.getErrorDetail());
    }
    if (other.hasClientId()) {
      setClientId(other.getClientId());
    }
    if (other.hasRetryCount()) {
      setRetryCount(other.getRetryCount());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }
  /** True when both required fields (callId, status) are set. */
  public final boolean isInitialized() {
    if (!hasCallId()) {
      return false;
    }
    if (!hasStatus()) {
      return false;
    }
    return true;
  }
  /**
   * Parses from {@code input} and merges; on parse failure, merges whatever
   * was successfully read before rethrowing.
   */
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Presence bits for the builder's own field values.
  private int bitField0_;

  // required uint32 callId = 1;
  private int callId_ ;
  /** {@code required uint32 callId = 1;} — the callId used in the request. */
  public boolean hasCallId() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  public int getCallId() {
    return callId_;
  }
  public Builder setCallId(int value) {
    bitField0_ |= 0x00000001;
    callId_ = value;
    onChanged();
    return this;
  }
  public Builder clearCallId() {
    bitField0_ = (bitField0_ & ~0x00000001);
    callId_ = 0;
    onChanged();
    return this;
  }

  // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
  private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
  /** {@code required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;} */
  public boolean hasStatus() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
    return status_;
  }
  public Builder setStatus(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000002;
    status_ = value;
    onChanged();
    return this;
  }
  public Builder clearStatus() {
    bitField0_ = (bitField0_ & ~0x00000002);
    status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
    onChanged();
    return this;
  }

  // optional uint32 serverIpcVersionNum = 3;
  private int serverIpcVersionNum_ ;
  /** {@code optional uint32 serverIpcVersionNum = 3;} — sent if success or fail. */
  public boolean hasServerIpcVersionNum() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  public int getServerIpcVersionNum() {
    return serverIpcVersionNum_;
  }
  public Builder setServerIpcVersionNum(int value) {
    bitField0_ |= 0x00000004;
    serverIpcVersionNum_ = value;
    onChanged();
    return this;
  }
  public Builder clearServerIpcVersionNum() {
    bitField0_ = (bitField0_ & ~0x00000004);
    serverIpcVersionNum_ = 0;
    onChanged();
    return this;
  }

  // optional string exceptionClassName = 4;
  private java.lang.Object exceptionClassName_ = "";
  /** {@code optional string exceptionClassName = 4;} — set if the request fails. */
  public boolean hasExceptionClassName() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  public java.lang.String getExceptionClassName() {
    java.lang.Object ref = exceptionClassName_;
    if (!(ref instanceof java.lang.String)) {
      // Decode and cache; builders (unlike messages) cache unconditionally.
      java.lang.String s = ((com.google.protobuf.ByteString) ref)
          .toStringUtf8();
      exceptionClassName_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  public com.google.protobuf.ByteString
      getExceptionClassNameBytes() {
    java.lang.Object ref = exceptionClassName_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      exceptionClassName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public Builder setExceptionClassName(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000008;
    exceptionClassName_ = value;
    onChanged();
    return this;
  }
  public Builder clearExceptionClassName() {
    bitField0_ = (bitField0_ & ~0x00000008);
    exceptionClassName_ = getDefaultInstance().getExceptionClassName();
    onChanged();
    return this;
  }
  public Builder setExceptionClassNameBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000008;
    exceptionClassName_ = value;
    onChanged();
    return this;
  }

  // optional string errorMsg = 5;
  private java.lang.Object errorMsg_ = "";
  /** {@code optional string errorMsg = 5;} — set on failure; often contains a stack trace. */
  public boolean hasErrorMsg() {
    return ((bitField0_ & 0x00000010) == 0x00000010);
  }
  public java.lang.String getErrorMsg() {
    java.lang.Object ref = errorMsg_;
    if (!(ref instanceof java.lang.String)) {
      java.lang.String s = ((com.google.protobuf.ByteString) ref)
          .toStringUtf8();
      errorMsg_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  public com.google.protobuf.ByteString
      getErrorMsgBytes() {
    java.lang.Object ref = errorMsg_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      errorMsg_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public Builder setErrorMsg(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000010;
    errorMsg_ = value;
    onChanged();
    return this;
  }
  public Builder clearErrorMsg() {
    bitField0_ = (bitField0_ & ~0x00000010);
    errorMsg_ = getDefaultInstance().getErrorMsg();
    onChanged();
    return this;
  }
  public Builder setErrorMsgBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000010;
    errorMsg_ = value;
    onChanged();
    return this;
  }

  // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
  private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
  /** {@code optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;} — set in case of error. */
  public boolean hasErrorDetail() {
    return ((bitField0_ & 0x00000020) == 0x00000020);
  }
  public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
    return errorDetail_;
  }
  public Builder setErrorDetail(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000020;
    errorDetail_ = value;
    onChanged();
    return this;
  }
  public Builder clearErrorDetail() {
    bitField0_ = (bitField0_ & ~0x00000020);
    errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
    onChanged();
    return this;
  }

  // optional bytes clientId = 7;
  private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
  /** {@code optional bytes clientId = 7;} — globally unique client ID. */
  public boolean hasClientId() {
    return ((bitField0_ & 0x00000040) == 0x00000040);
  }
  public com.google.protobuf.ByteString getClientId() {
    return clientId_;
  }
  public Builder setClientId(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000040;
    clientId_ = value;
    onChanged();
    return this;
  }
  public Builder clearClientId() {
    bitField0_ = (bitField0_ & ~0x00000040);
    clientId_ = getDefaultInstance().getClientId();
    onChanged();
    return this;
  }

  // optional sint32 retryCount = 8 [default = -1];
  private int retryCount_ = -1;
  /** {@code optional sint32 retryCount = 8 [default = -1];} */
  public boolean hasRetryCount() {
    return ((bitField0_ & 0x00000080) == 0x00000080);
  }
  public int getRetryCount() {
    return retryCount_;
  }
  public Builder setRetryCount(int value) {
    bitField0_ |= 0x00000080;
    retryCount_ = value;
    onChanged();
    return this;
  }
  public Builder clearRetryCount() {
    bitField0_ = (bitField0_ & ~0x00000080);
    retryCount_ = -1;
    onChanged();
    return this;
  }

  // @@protoc_insertion_point(builder_scope:hadoop.common.RpcResponseHeaderProto)
}
// Eagerly create the default instance with all fields at proto defaults.
static {
defaultInstance = new RpcResponseHeaderProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcResponseHeaderProto)
}
public interface RpcSaslProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional uint32 version = 1;
/**
* optional uint32 version = 1;
*/
boolean hasVersion();
/**
* optional uint32 version = 1;
*/
int getVersion();
// required .hadoop.common.RpcSaslProto.SaslState state = 2;
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
*/
boolean hasState();
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState();
// optional bytes token = 3;
/**
* optional bytes token = 3;
*/
boolean hasToken();
/**
* optional bytes token = 3;
*/
com.google.protobuf.ByteString getToken();
// repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
java.util.List
getAuthsList();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index);
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
int getAuthsCount();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
java.util.List extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsOrBuilderList();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto}
*/
public static final class RpcSaslProto extends
com.google.protobuf.GeneratedMessage
implements RpcSaslProtoOrBuilder {
// Use RpcSaslProto.newBuilder() to construct.
// Use RpcSaslProto.newBuilder() to construct.
// Note: the wildcard type parameter was restored here; extraction had stripped "<?>".
private RpcSaslProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only to build the singleton default instance.
private RpcSaslProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, created in the class's static initializer.
private static final RpcSaslProto defaultInstance;
public static RpcSaslProto getDefaultInstance() {
return defaultInstance;
}
public RpcSaslProto getDefaultInstanceForType() {
return defaultInstance;
}
// Fields encountered during parsing that this schema version does not know about.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor: reads fields from the CodedInputStream until
// end-of-message (tag 0). Unrecognized tags — and unrecognized enum numbers
// for 'state' — are preserved in unknownFields rather than dropped.
// FIX: restored the element type argument on the ArrayList backing the
// repeated 'auths' field (generics were stripped during extraction).
private RpcSaslProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
version_ = input.readUInt32();
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.valueOf(rawValue);
if (value == null) {
// Unknown enum number: keep it as an unknown varint field.
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
state_ = value;
}
break;
}
case 26: {
bitField0_ |= 0x00000004;
token_ = input.readBytes();
break;
}
case 34: {
// Lazily allocate the repeated-field list on first element.
if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>();
mutable_bitField0_ |= 0x00000008;
}
auths_.add(input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown fields even on parse failure, so
// the partially-built message attached to the exception is immutable.
if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
auths_ = java.util.Collections.unmodifiableList(auths_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor and reflection accessors for hadoop.common.RpcSaslProto,
// backed by tables defined elsewhere in RpcHeaderProtos.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
}
// Parser singleton used by parseFrom()/parseDelimitedFrom().
// FIX: restored the <RpcSaslProto> type arguments on Parser and
// AbstractParser that were stripped during extraction.
public static com.google.protobuf.Parser<RpcSaslProto> PARSER =
new com.google.protobuf.AbstractParser<RpcSaslProto>() {
public RpcSaslProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RpcSaslProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RpcSaslProto> getParserForType() {
return PARSER;
}
/**
 * Protobuf enum {@code hadoop.common.RpcSaslProto.SaslState}
 *
 * States of the SASL negotiation exchange carried in RpcSaslProto.
 * FIX: restored the {@code <SaslState>} type arguments on
 * Internal.EnumLiteMap that were stripped during extraction.
 */
public enum SaslState
implements com.google.protobuf.ProtocolMessageEnum {
/**
 * SUCCESS = 0;
 */
SUCCESS(0, 0),
/**
 * NEGOTIATE = 1;
 */
NEGOTIATE(1, 1),
/**
 * INITIATE = 2;
 */
INITIATE(2, 2),
/**
 * CHALLENGE = 3;
 */
CHALLENGE(3, 3),
/**
 * RESPONSE = 4;
 */
RESPONSE(4, 4),
/**
 * WRAP = 5;
 */
WRAP(5, 5),
;
/**
 * SUCCESS = 0;
 */
public static final int SUCCESS_VALUE = 0;
/**
 * NEGOTIATE = 1;
 */
public static final int NEGOTIATE_VALUE = 1;
/**
 * INITIATE = 2;
 */
public static final int INITIATE_VALUE = 2;
/**
 * CHALLENGE = 3;
 */
public static final int CHALLENGE_VALUE = 3;
/**
 * RESPONSE = 4;
 */
public static final int RESPONSE_VALUE = 4;
/**
 * WRAP = 5;
 */
public static final int WRAP_VALUE = 5;
// Wire number of this enum value.
public final int getNumber() { return value; }
// Maps a wire number to its enum value; null for unrecognized numbers.
public static SaslState valueOf(int value) {
switch (value) {
case 0: return SUCCESS;
case 1: return NEGOTIATE;
case 2: return INITIATE;
case 3: return CHALLENGE;
case 4: return RESPONSE;
case 5: return WRAP;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<SaslState>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<SaslState>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<SaslState>() {
public SaslState findValueByNumber(int number) {
return SaslState.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDescriptor().getEnumTypes().get(0);
}
private static final SaslState[] VALUES = values();
// Descriptor-based lookup used by protobuf reflection.
public static SaslState valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position in the descriptor; value: wire number.
private final int index;
private final int value;
private SaslState(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcSaslProto.SaslState)
}
// Read-only view shared by SaslAuth and SaslAuth.Builder: hasX() reports
// field presence, getX() returns the value (or its default when unset),
// and getXBytes() returns the raw UTF-8 bytes of a string field.
public interface SaslAuthOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string method = 1;
/**
* required string method = 1;
*/
boolean hasMethod();
/**
* required string method = 1;
*/
java.lang.String getMethod();
/**
* required string method = 1;
*/
com.google.protobuf.ByteString
getMethodBytes();
// required string mechanism = 2;
/**
* required string mechanism = 2;
*/
boolean hasMechanism();
/**
* required string mechanism = 2;
*/
java.lang.String getMechanism();
/**
* required string mechanism = 2;
*/
com.google.protobuf.ByteString
getMechanismBytes();
// optional string protocol = 3;
/**
* optional string protocol = 3;
*/
boolean hasProtocol();
/**
* optional string protocol = 3;
*/
java.lang.String getProtocol();
/**
* optional string protocol = 3;
*/
com.google.protobuf.ByteString
getProtocolBytes();
// optional string serverId = 4;
/**
* optional string serverId = 4;
*/
boolean hasServerId();
/**
* optional string serverId = 4;
*/
java.lang.String getServerId();
/**
* optional string serverId = 4;
*/
com.google.protobuf.ByteString
getServerIdBytes();
// optional bytes challenge = 5;
/**
* optional bytes challenge = 5;
*/
boolean hasChallenge();
/**
* optional bytes challenge = 5;
*/
com.google.protobuf.ByteString getChallenge();
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
*/
public static final class SaslAuth extends
com.google.protobuf.GeneratedMessage
implements SaslAuthOrBuilder {
// Use SaslAuth.newBuilder() to construct.
// FIX: restored the wildcard type argument on GeneratedMessage.Builder<?>,
// which was stripped when this generated source was extracted.
private SaslAuth(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor: used only for the singleton default instance,
// skipping wire parsing and attaching an empty unknown-field set.
private SaslAuth(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, assigned in the class static initializer.
private static final SaslAuth defaultInstance;
public static SaslAuth getDefaultInstance() {
return defaultInstance;
}
public SaslAuth getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire with tags this message does not define.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor: reads fields from the CodedInputStream until
// end-of-message (tag 0); string fields are kept as ByteString and decoded
// lazily by the getters. Unknown tags are preserved in unknownFields.
private SaslAuth(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
method_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
mechanism_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
protocol_ = input.readBytes();
break;
}
case 34: {
bitField0_ |= 0x00000008;
serverId_ = input.readBytes();
break;
}
case 42: {
bitField0_ |= 0x00000010;
challenge_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal unknown fields even on parse failure, so the partially-built
// message attached to the exception is immutable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor and reflection accessors for hadoop.common.RpcSaslProto.SaslAuth.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
}
// Parser singleton used by parseFrom()/parseDelimitedFrom().
// FIX: restored the <SaslAuth> type arguments on Parser and AbstractParser
// that were stripped during extraction.
public static com.google.protobuf.Parser<SaslAuth> PARSER =
new com.google.protobuf.AbstractParser<SaslAuth>() {
public SaslAuth parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SaslAuth(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SaslAuth> getParserForType() {
return PARSER;
}
// Field storage and accessors. Presence is tracked in bitField0_. String
// fields are stored as Object holding either a decoded java.lang.String or
// the raw ByteString from the wire; getX() decodes UTF-8 lazily and caches
// the String only when the bytes are valid UTF-8, while getXBytes() does
// the reverse conversion and caches the ByteString.
private int bitField0_;
// required string method = 1;
public static final int METHOD_FIELD_NUMBER = 1;
private java.lang.Object method_;
/**
* required string method = 1;
*/
public boolean hasMethod() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string method = 1;
*/
public java.lang.String getMethod() {
java.lang.Object ref = method_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
method_ = s;
}
return s;
}
}
/**
* required string method = 1;
*/
public com.google.protobuf.ByteString
getMethodBytes() {
java.lang.Object ref = method_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
method_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string mechanism = 2;
public static final int MECHANISM_FIELD_NUMBER = 2;
private java.lang.Object mechanism_;
/**
* required string mechanism = 2;
*/
public boolean hasMechanism() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required string mechanism = 2;
*/
public java.lang.String getMechanism() {
java.lang.Object ref = mechanism_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
mechanism_ = s;
}
return s;
}
}
/**
* required string mechanism = 2;
*/
public com.google.protobuf.ByteString
getMechanismBytes() {
java.lang.Object ref = mechanism_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mechanism_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string protocol = 3;
public static final int PROTOCOL_FIELD_NUMBER = 3;
private java.lang.Object protocol_;
/**
* optional string protocol = 3;
*/
public boolean hasProtocol() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional string protocol = 3;
*/
public java.lang.String getProtocol() {
java.lang.Object ref = protocol_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
protocol_ = s;
}
return s;
}
}
/**
* optional string protocol = 3;
*/
public com.google.protobuf.ByteString
getProtocolBytes() {
java.lang.Object ref = protocol_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
protocol_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string serverId = 4;
public static final int SERVERID_FIELD_NUMBER = 4;
private java.lang.Object serverId_;
/**
* optional string serverId = 4;
*/
public boolean hasServerId() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* optional string serverId = 4;
*/
public java.lang.String getServerId() {
java.lang.Object ref = serverId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
serverId_ = s;
}
return s;
}
}
/**
* optional string serverId = 4;
*/
public com.google.protobuf.ByteString
getServerIdBytes() {
java.lang.Object ref = serverId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bytes challenge = 5;
public static final int CHALLENGE_FIELD_NUMBER = 5;
private com.google.protobuf.ByteString challenge_;
/**
* optional bytes challenge = 5;
*/
public boolean hasChallenge() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* optional bytes challenge = 5;
*/
public com.google.protobuf.ByteString getChallenge() {
return challenge_;
}
// Sets every field to its proto default value.
private void initFields() {
method_ = "";
mechanism_ = "";
protocol_ = "";
serverId_ = "";
challenge_ = com.google.protobuf.ByteString.EMPTY;
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
// A SaslAuth is initialized iff both required fields (method, mechanism)
// are present.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasMethod()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMechanism()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bit is set, in field-number
// order, followed by any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the serialized size to be computed (and cached) before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getMethodBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getMechanismBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getProtocolBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getServerIdBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBytes(5, challenge_);
}
getUnknownFields().writeTo(output);
}
// Cached serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Returns the exact number of bytes writeTo() will emit, memoized.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getMethodBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getMechanismBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getProtocolBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getServerIdBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, challenge_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: same set fields with equal values, and equal unknown
// fields. Unset optional fields are ignored.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) obj;
boolean result = true;
result = result && (hasMethod() == other.hasMethod());
if (hasMethod()) {
result = result && getMethod()
.equals(other.getMethod());
}
result = result && (hasMechanism() == other.hasMechanism());
if (hasMechanism()) {
result = result && getMechanism()
.equals(other.getMechanism());
}
result = result && (hasProtocol() == other.hasProtocol());
if (hasProtocol()) {
result = result && getProtocol()
.equals(other.getProtocol());
}
result = result && (hasServerId() == other.hasServerId());
if (hasServerId()) {
result = result && getServerId()
.equals(other.getServerId());
}
result = result && (hasChallenge() == other.hasChallenge());
if (hasChallenge()) {
result = result && getChallenge()
.equals(other.getChallenge());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means not yet computed (a computed hash of 0 is simply
// recomputed, which is harmless).
private int memoizedHashCode = 0;
// Hash consistent with equals(): mixes in the field number and value hash
// of every set field, plus the unknown fields.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasMethod()) {
hash = (37 * hash) + METHOD_FIELD_NUMBER;
hash = (53 * hash) + getMethod().hashCode();
}
if (hasMechanism()) {
hash = (37 * hash) + MECHANISM_FIELD_NUMBER;
hash = (53 * hash) + getMechanism().hashCode();
}
if (hasProtocol()) {
hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
hash = (53 * hash) + getProtocol().hashCode();
}
if (hasServerId()) {
hash = (37 * hash) + SERVERID_FIELD_NUMBER;
hash = (53 * hash) + getServerId().hashCode();
}
if (hasChallenge()) {
hash = (37 * hash) + CHALLENGE_FIELD_NUMBER;
hash = (53 * hash) + getChallenge().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to the PARSER singleton.
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
method_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
mechanism_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
protocol_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
serverId_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
challenge_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance();
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.method_ = method_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.mechanism_ = mechanism_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.protocol_ = protocol_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.serverId_ = serverId_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.challenge_ = challenge_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance()) return this;
if (other.hasMethod()) {
bitField0_ |= 0x00000001;
method_ = other.method_;
onChanged();
}
if (other.hasMechanism()) {
bitField0_ |= 0x00000002;
mechanism_ = other.mechanism_;
onChanged();
}
if (other.hasProtocol()) {
bitField0_ |= 0x00000004;
protocol_ = other.protocol_;
onChanged();
}
if (other.hasServerId()) {
bitField0_ |= 0x00000008;
serverId_ = other.serverId_;
onChanged();
}
if (other.hasChallenge()) {
setChallenge(other.getChallenge());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasMethod()) {
return false;
}
if (!hasMechanism()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string method = 1;
private java.lang.Object method_ = "";
/**
* required string method = 1;
*/
public boolean hasMethod() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string method = 1;
*/
public java.lang.String getMethod() {
java.lang.Object ref = method_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
method_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string method = 1;
*/
public com.google.protobuf.ByteString
getMethodBytes() {
java.lang.Object ref = method_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
method_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string method = 1;
*/
public Builder setMethod(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
method_ = value;
onChanged();
return this;
}
/**
* required string method = 1;
*/
public Builder clearMethod() {
bitField0_ = (bitField0_ & ~0x00000001);
method_ = getDefaultInstance().getMethod();
onChanged();
return this;
}
/**
* required string method = 1;
*/
public Builder setMethodBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
method_ = value;
onChanged();
return this;
}
// required string mechanism = 2;
private java.lang.Object mechanism_ = "";
/**
* required string mechanism = 2;
*/
public boolean hasMechanism() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required string mechanism = 2;
*/
public java.lang.String getMechanism() {
java.lang.Object ref = mechanism_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
mechanism_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string mechanism = 2;
*/
public com.google.protobuf.ByteString
getMechanismBytes() {
java.lang.Object ref = mechanism_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mechanism_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string mechanism = 2;
*/
public Builder setMechanism(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
mechanism_ = value;
onChanged();
return this;
}
/**
* required string mechanism = 2;
*/
public Builder clearMechanism() {
bitField0_ = (bitField0_ & ~0x00000002);
mechanism_ = getDefaultInstance().getMechanism();
onChanged();
return this;
}
/**
* required string mechanism = 2;
*/
public Builder setMechanismBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
mechanism_ = value;
onChanged();
return this;
}
// optional string protocol = 3;
private java.lang.Object protocol_ = "";
/**
* optional string protocol = 3;
*/
public boolean hasProtocol() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional string protocol = 3;
*/
public java.lang.String getProtocol() {
java.lang.Object ref = protocol_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
protocol_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string protocol = 3;
*/
public com.google.protobuf.ByteString
getProtocolBytes() {
java.lang.Object ref = protocol_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
protocol_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* optional string protocol = 3;
*/
public Builder setProtocol(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
protocol_ = value;
onChanged();
return this;
}
/**
* optional string protocol = 3;
*/
public Builder clearProtocol() {
bitField0_ = (bitField0_ & ~0x00000004);
protocol_ = getDefaultInstance().getProtocol();
onChanged();
return this;
}
/**
* optional string protocol = 3;
*/
public Builder setProtocolBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
protocol_ = value;
onChanged();
return this;
}
// optional string serverId = 4;
// Holds either a java.lang.String or a ByteString; conversion between the
// two representations is done lazily and cached (standard protoc pattern).
private java.lang.Object serverId_ = "";
/**
 * Returns true when the {@code serverId} presence bit (0x08) is set.
 *
 * <code>optional string serverId = 4;</code>
 */
public boolean hasServerId() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * Returns {@code serverId} as a String, decoding from UTF-8 bytes on first
 * access and caching the decoded value.
 *
 * <code>optional string serverId = 4;</code>
 */
public java.lang.String getServerId() {
java.lang.Object ref = serverId_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
serverId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * Returns {@code serverId} as a ByteString, encoding to UTF-8 on first
 * access and caching the encoded value.
 *
 * <code>optional string serverId = 4;</code>
 */
public com.google.protobuf.ByteString
getServerIdBytes() {
java.lang.Object ref = serverId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * Sets the {@code serverId} string and marks the field present (bit 0x08).
 *
 * <code>optional string serverId = 4;</code>
 */
public Builder setServerId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
serverId_ = value;
onChanged();
return this;
}
/**
 * Clears {@code serverId} back to the default-instance value and clears
 * its presence bit.
 *
 * <code>optional string serverId = 4;</code>
 */
public Builder clearServerId() {
bitField0_ = (bitField0_ & ~0x00000008);
serverId_ = getDefaultInstance().getServerId();
onChanged();
return this;
}
/**
 * Sets {@code serverId} from raw bytes (stored as-is, no UTF-8 validation).
 *
 * <code>optional string serverId = 4;</code>
 */
public Builder setServerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
serverId_ = value;
onChanged();
return this;
}
// optional bytes challenge = 5;
private com.google.protobuf.ByteString challenge_ = com.google.protobuf.ByteString.EMPTY;
/**
 * Returns true when the {@code challenge} presence bit (0x10) is set.
 *
 * <code>optional bytes challenge = 5;</code>
 */
public boolean hasChallenge() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bytes challenge = 5;</code>
 */
public com.google.protobuf.ByteString getChallenge() {
return challenge_;
}
/**
 * Sets the {@code challenge} bytes and marks the field present (bit 0x10).
 *
 * <code>optional bytes challenge = 5;</code>
 */
public Builder setChallenge(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
challenge_ = value;
onChanged();
return this;
}
/**
 * Clears {@code challenge} back to the default-instance value and clears
 * its presence bit.
 *
 * <code>optional bytes challenge = 5;</code>
 */
public Builder clearChallenge() {
bitField0_ = (bitField0_ & ~0x00000010);
challenge_ = getDefaultInstance().getChallenge();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto.SaslAuth)
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto.SaslAuth)
}
// Eagerly create the singleton default instance for SaslAuth; the boolean
// constructor argument selects the "no-op/default" construction path.
static {
defaultInstance = new SaslAuth(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto.SaslAuth)
}
// Presence bits for the optional/required fields of RpcSaslProto.
private int bitField0_;
// optional uint32 version = 1;
public static final int VERSION_FIELD_NUMBER = 1;
private int version_;
/**
 * Returns true when the {@code version} presence bit (0x01) is set.
 *
 * <code>optional uint32 version = 1;</code>
 */
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional uint32 version = 1;</code>
 */
public int getVersion() {
return version_;
}
// required .hadoop.common.RpcSaslProto.SaslState state = 2;
public static final int STATE_FIELD_NUMBER = 2;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_;
/**
 * Returns true when the {@code state} presence bit (0x02) is set.
 *
 * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
 */
public boolean hasState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
 */
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
return state_;
}
// optional bytes token = 3;
public static final int TOKEN_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString token_;
/**
 * Returns true when the {@code token} presence bit (0x04) is set.
 *
 * <code>optional bytes token = 3;</code>
 */
public boolean hasToken() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bytes token = 3;</code>
 */
public com.google.protobuf.ByteString getToken() {
return token_;
}
// repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
public static final int AUTHS_FIELD_NUMBER = 4;
private java.util.List auths_;
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List getAuthsList() {
return auths_;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsOrBuilderList() {
return auths_;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public int getAuthsCount() {
return auths_.size();
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
return auths_.get(index);
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
int index) {
return auths_.get(index);
}
// Resets every field to its proto-declared default value.
private void initFields() {
version_ = 0;
state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
token_ = com.google.protobuf.ByteString.EMPTY;
auths_ = java.util.Collections.emptyList();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// A message is initialized when the required 'state' field is present and
// every element of the repeated 'auths' field is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasState()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getAuthsCount(); i++) {
if (!getAuths(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes the set fields in tag order (1..4) followed by any unknown
// fields. getSerializedSize() is called first to populate the memoized
// size used by nested writes.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt32(1, version_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, state_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, token_);
}
for (int i = 0; i < auths_.size(); i++) {
output.writeMessage(4, auths_.get(i));
}
getUnknownFields().writeTo(output);
}
// Memoized wire size: -1 = not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and caches) the serialized byte size; mirrors writeTo() so the
// two stay consistent field-for-field.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(1, version_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, state_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, token_);
}
for (int i = 0; i < auths_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, auths_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to GeneratedMessage's writeReplace.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: presence bits must match, then values; unknown
// fields are compared too.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) obj;
boolean result = true;
result = result && (hasVersion() == other.hasVersion());
if (hasVersion()) {
result = result && (getVersion()
== other.getVersion());
}
result = result && (hasState() == other.hasState());
if (hasState()) {
result = result &&
(getState() == other.getState());
}
result = result && (hasToken() == other.hasToken());
if (hasToken()) {
result = result && getToken()
.equals(other.getToken());
}
result = result && getAuthsList()
.equals(other.getAuthsList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash: 0 = not yet computed.
private int memoizedHashCode = 0;
// Hash mixes the descriptor, each present field (keyed by field number),
// and the unknown fields; consistent with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion();
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getState());
}
if (hasToken()) {
hash = (37 * hash) + TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getToken().hashCode();
}
if (getAuthsCount() > 0) {
hash = (37 * hash) + AUTHS_FIELD_NUMBER;
hash = (53 * hash) + getAuthsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---- Static parse entry points; all delegate to the shared PARSER. ----
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Length-delimited variants: message preceded by a varint size prefix.
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// ---- Builder factory methods. ----
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getAuthsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
version_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
bitField0_ = (bitField0_ & ~0x00000002);
token_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
if (authsBuilder_ == null) {
auths_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
authsBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance();
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.version_ = version_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.state_ = state_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.token_ = token_;
if (authsBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008)) {
auths_ = java.util.Collections.unmodifiableList(auths_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.auths_ = auths_;
} else {
result.auths_ = authsBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance()) return this;
if (other.hasVersion()) {
setVersion(other.getVersion());
}
if (other.hasState()) {
setState(other.getState());
}
if (other.hasToken()) {
setToken(other.getToken());
}
if (authsBuilder_ == null) {
if (!other.auths_.isEmpty()) {
if (auths_.isEmpty()) {
auths_ = other.auths_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureAuthsIsMutable();
auths_.addAll(other.auths_);
}
onChanged();
}
} else {
if (!other.auths_.isEmpty()) {
if (authsBuilder_.isEmpty()) {
authsBuilder_.dispose();
authsBuilder_ = null;
auths_ = other.auths_;
bitField0_ = (bitField0_ & ~0x00000008);
authsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getAuthsFieldBuilder() : null;
} else {
authsBuilder_.addAllMessages(other.auths_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasState()) {
return false;
}
for (int i = 0; i < getAuthsCount(); i++) {
if (!getAuths(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional uint32 version = 1;
private int version_ ;
/**
* optional uint32 version = 1;
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional uint32 version = 1;
*/
public int getVersion() {
return version_;
}
/**
* optional uint32 version = 1;
*/
public Builder setVersion(int value) {
bitField0_ |= 0x00000001;
version_ = value;
onChanged();
return this;
}
/**
* optional uint32 version = 1;
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = 0;
onChanged();
return this;
}
// required .hadoop.common.RpcSaslProto.SaslState state = 2;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
*/
public boolean hasState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
return state_;
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
*/
public Builder setState(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
state_ = value;
onChanged();
return this;
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000002);
state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
onChanged();
return this;
}
// optional bytes token = 3;
private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes token = 3;
*/
public boolean hasToken() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional bytes token = 3;
*/
public com.google.protobuf.ByteString getToken() {
return token_;
}
/**
* optional bytes token = 3;
*/
public Builder setToken(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
token_ = value;
onChanged();
return this;
}
/**
* optional bytes token = 3;
*/
public Builder clearToken() {
bitField0_ = (bitField0_ & ~0x00000004);
token_ = getDefaultInstance().getToken();
onChanged();
return this;
}
// repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
private java.util.List auths_ =
java.util.Collections.emptyList();
private void ensureAuthsIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
auths_ = new java.util.ArrayList(auths_);
bitField0_ |= 0x00000008;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> authsBuilder_;
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List getAuthsList() {
if (authsBuilder_ == null) {
return java.util.Collections.unmodifiableList(auths_);
} else {
return authsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public int getAuthsCount() {
if (authsBuilder_ == null) {
return auths_.size();
} else {
return authsBuilder_.getCount();
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
if (authsBuilder_ == null) {
return auths_.get(index);
} else {
return authsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder setAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
if (authsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAuthsIsMutable();
auths_.set(index, value);
onChanged();
} else {
authsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder setAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.set(index, builderForValue.build());
onChanged();
} else {
authsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
if (authsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAuthsIsMutable();
auths_.add(value);
onChanged();
} else {
authsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
if (authsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAuthsIsMutable();
auths_.add(index, value);
onChanged();
} else {
authsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.add(builderForValue.build());
onChanged();
} else {
authsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.add(index, builderForValue.build());
onChanged();
} else {
authsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAllAuths(
java.lang.Iterable extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> values) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
super.addAll(values, auths_);
onChanged();
} else {
authsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder clearAuths() {
if (authsBuilder_ == null) {
auths_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
authsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder removeAuths(int index) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.remove(index);
onChanged();
} else {
authsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder getAuthsBuilder(
int index) {
return getAuthsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
int index) {
if (authsBuilder_ == null) {
return auths_.get(index); } else {
return authsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsOrBuilderList() {
if (authsBuilder_ != null) {
return authsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(auths_);
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder() {
return getAuthsFieldBuilder().addBuilder(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder(
int index) {
return getAuthsFieldBuilder().addBuilder(
index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List
getAuthsBuilderList() {
return getAuthsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsFieldBuilder() {
if (authsBuilder_ == null) {
authsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>(
auths_,
((bitField0_ & 0x00000008) == 0x00000008),
getParentForChildren(),
isClean());
auths_ = null;
}
return authsBuilder_;
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto)
}
// Eagerly create the singleton default instance for RpcSaslProto; the
// boolean constructor argument selects the "no-op/default" construction path.
static {
defaultInstance = new RpcSaslProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto)
}
// ---- Per-message descriptors and reflective field-accessor tables, one
// ---- pair per message type in RpcHeader.proto. Assigned by the static
// ---- initializer below when the file descriptor is built.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RPCCallerContextProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcSaslProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable;
// Returns the file-level descriptor for RpcHeader.proto.
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
// Builds the FileDescriptor from the serialized RpcHeader.proto descriptor
// (embedded below as escaped string data — do not edit), then wires up the
// per-message descriptors and field-accessor tables declared above.
static {
java.lang.String[] descriptorData = {
"\n\017RpcHeader.proto\022\rhadoop.common\"6\n\021RPCT" +
"raceInfoProto\022\017\n\007traceId\030\001 \001(\003\022\020\n\010parent" +
"Id\030\002 \001(\003\";\n\025RPCCallerContextProto\022\017\n\007con" +
"text\030\001 \002(\t\022\021\n\tsignature\030\002 \001(\014\"\224\003\n\025RpcReq" +
"uestHeaderProto\022,\n\007rpcKind\030\001 \001(\0162\033.hadoo" +
"p.common.RpcKindProto\022B\n\005rpcOp\030\002 \001(\01623.h" +
"adoop.common.RpcRequestHeaderProto.Opera" +
"tionProto\022\016\n\006callId\030\003 \002(\021\022\020\n\010clientId\030\004 " +
"\002(\014\022\026\n\nretryCount\030\005 \001(\021:\002-1\0223\n\ttraceInfo" +
"\030\006 \001(\0132 .hadoop.common.RPCTraceInfoProto",
"\022;\n\rcallerContext\030\007 \001(\0132$.hadoop.common." +
"RPCCallerContextProto\"]\n\016OperationProto\022" +
"\024\n\020RPC_FINAL_PACKET\020\000\022\033\n\027RPC_CONTINUATIO" +
"N_PACKET\020\001\022\030\n\024RPC_CLOSE_CONNECTION\020\002\"\312\005\n" +
"\026RpcResponseHeaderProto\022\016\n\006callId\030\001 \002(\r\022" +
"D\n\006status\030\002 \002(\01624.hadoop.common.RpcRespo" +
"nseHeaderProto.RpcStatusProto\022\033\n\023serverI" +
"pcVersionNum\030\003 \001(\r\022\032\n\022exceptionClassName" +
"\030\004 \001(\t\022\020\n\010errorMsg\030\005 \001(\t\022L\n\013errorDetail\030" +
"\006 \001(\01627.hadoop.common.RpcResponseHeaderP",
"roto.RpcErrorCodeProto\022\020\n\010clientId\030\007 \001(\014" +
"\022\026\n\nretryCount\030\010 \001(\021:\002-1\"3\n\016RpcStatusPro" +
"to\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"\341\002" +
"\n\021RpcErrorCodeProto\022\025\n\021ERROR_APPLICATION" +
"\020\001\022\030\n\024ERROR_NO_SUCH_METHOD\020\002\022\032\n\026ERROR_NO" +
"_SUCH_PROTOCOL\020\003\022\024\n\020ERROR_RPC_SERVER\020\004\022\036" +
"\n\032ERROR_SERIALIZING_RESPONSE\020\005\022\036\n\032ERROR_" +
"RPC_VERSION_MISMATCH\020\006\022\021\n\rFATAL_UNKNOWN\020" +
"\n\022#\n\037FATAL_UNSUPPORTED_SERIALIZATION\020\013\022\034" +
"\n\030FATAL_INVALID_RPC_HEADER\020\014\022\037\n\033FATAL_DE",
"SERIALIZING_REQUEST\020\r\022\032\n\026FATAL_VERSION_M" +
"ISMATCH\020\016\022\026\n\022FATAL_UNAUTHORIZED\020\017\"\335\002\n\014Rp" +
"cSaslProto\022\017\n\007version\030\001 \001(\r\0224\n\005state\030\002 \002" +
"(\0162%.hadoop.common.RpcSaslProto.SaslStat" +
"e\022\r\n\005token\030\003 \001(\014\0223\n\005auths\030\004 \003(\0132$.hadoop" +
".common.RpcSaslProto.SaslAuth\032d\n\010SaslAut" +
"h\022\016\n\006method\030\001 \002(\t\022\021\n\tmechanism\030\002 \002(\t\022\020\n\010" +
"protocol\030\003 \001(\t\022\020\n\010serverId\030\004 \001(\t\022\021\n\tchal" +
"lenge\030\005 \001(\014\"\\\n\tSaslState\022\013\n\007SUCCESS\020\000\022\r\n" +
"\tNEGOTIATE\020\001\022\014\n\010INITIATE\020\002\022\r\n\tCHALLENGE\020",
"\003\022\014\n\010RESPONSE\020\004\022\010\n\004WRAP\020\005*J\n\014RpcKindProt" +
"o\022\017\n\013RPC_BUILTIN\020\000\022\020\n\014RPC_WRITABLE\020\001\022\027\n\023" +
"RPC_PROTOCOL_BUFFER\020\002B4\n\036org.apache.hado" +
"op.ipc.protobufB\017RpcHeaderProtos\240\001\001"
};
// Callback invoked once the FileDescriptor is built; indices into
// getMessageTypes() follow the declaration order in RpcHeader.proto.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_hadoop_common_RPCTraceInfoProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_common_RPCTraceInfoProto_descriptor,
new java.lang.String[] { "TraceId", "ParentId", });
internal_static_hadoop_common_RPCCallerContextProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_common_RPCCallerContextProto_descriptor,
new java.lang.String[] { "Context", "Signature", });
internal_static_hadoop_common_RpcRequestHeaderProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_common_RpcRequestHeaderProto_descriptor,
new java.lang.String[] { "RpcKind", "RpcOp", "CallId", "ClientId", "RetryCount", "TraceInfo", "CallerContext", });
internal_static_hadoop_common_RpcResponseHeaderProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_common_RpcResponseHeaderProto_descriptor,
new java.lang.String[] { "CallId", "Status", "ServerIpcVersionNum", "ExceptionClassName", "ErrorMsg", "ErrorDetail", "ClientId", "RetryCount", });
internal_static_hadoop_common_RpcSaslProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_common_RpcSaslProto_descriptor,
new java.lang.String[] { "Version", "State", "Token", "Auths", });
internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor =
internal_static_hadoop_common_RpcSaslProto_descriptor.getNestedTypes().get(0);
internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor,
new java.lang.String[] { "Method", "Mechanism", "Protocol", "ServerId", "Challenge", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
© 2015 - 2024 Weber Informatics LLC | Privacy Policy