// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: RpcHeader.proto
package org.apache.hadoop.ipc.protobuf;
public final class RpcHeaderProtos {
private RpcHeaderProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
/**
 * RpcKind determines the rpcEngine and the serialization of the rpc request
 *
 * Protobuf enum {@code hadoop.common.RpcKindProto}
 */
public enum RpcKindProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
*
* Used for built in calls by tests
*
*
* RPC_BUILTIN = 0;
*/
RPC_BUILTIN(0),
/**
*
* Use WritableRpcEngine
*
*
* RPC_WRITABLE = 1;
*/
RPC_WRITABLE(1),
/**
*
* Use ProtobufRpcEngine
*
*
* RPC_PROTOCOL_BUFFER = 2;
*/
RPC_PROTOCOL_BUFFER(2),
;
/**
*
* Used for built in calls by tests
*
*
* RPC_BUILTIN = 0;
*/
public static final int RPC_BUILTIN_VALUE = 0;
/**
*
* Use WritableRpcEngine
*
*
* RPC_WRITABLE = 1;
*/
public static final int RPC_WRITABLE_VALUE = 1;
/**
*
* Use ProtobufRpcEngine
*
*
* RPC_PROTOCOL_BUFFER = 2;
*/
public static final int RPC_PROTOCOL_BUFFER_VALUE = 2;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static RpcKindProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static RpcKindProto forNumber(int value) {
switch (value) {
case 0: return RPC_BUILTIN;
case 1: return RPC_WRITABLE;
case 2: return RPC_PROTOCOL_BUFFER;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RpcKindProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
RpcKindProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RpcKindProto>() {
public RpcKindProto findValueByNumber(int number) {
return RpcKindProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.getDescriptor().getEnumTypes().get(0);
}
private static final RpcKindProto[] VALUES = values();
public static RpcKindProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private RpcKindProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcKindProto)
}
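// Usage sketch (illustrative helper, not emitted by protoc): forNumber(int)
// returns null for wire values outside 0..2, so code mapping a numeric value
// read from a header should supply a fallback rather than risk an NPE.
private static RpcKindProto rpcKindOrDefault(int wireValue) {
RpcKindProto kind = RpcKindProto.forNumber(wireValue);
return kind == null ? RpcKindProto.RPC_BUILTIN : kind;
}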
public interface RPCTraceInfoProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.RPCTraceInfoProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return Whether the traceId field is set.
*/
boolean hasTraceId();
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return The traceId.
*/
long getTraceId();
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return Whether the parentId field is set.
*/
boolean hasParentId();
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return The parentId.
*/
long getParentId();
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return Whether the spanContext field is set.
*/
boolean hasSpanContext();
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return The spanContext.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getSpanContext();
}
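// Usage sketch (illustrative, not generated code): accepting the OrBuilder
// interface lets a helper work with both RPCTraceInfoProto messages and
// in-progress builders; hasTraceId() guards reads of the optional field.
private static long traceIdOrZero(RPCTraceInfoProtoOrBuilder info) {
return info.hasTraceId() ? info.getTraceId() : 0L;
}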
/**
 * Used to pass through the information necessary to continue
 * a trace after an RPC is made. All we need is the traceid
 * (so we know the overarching trace this message is a part of), and
 * the id of the current span when this message was sent, so we know
 * what span caused the new span we will create when this message is received.
 *
 * Protobuf type {@code hadoop.common.RPCTraceInfoProto}
 */
public static final class RPCTraceInfoProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.RPCTraceInfoProto)
RPCTraceInfoProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RPCTraceInfoProto.newBuilder() to construct.
private RPCTraceInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RPCTraceInfoProto() {
spanContext_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RPCTraceInfoProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
}
private int bitField0_;
public static final int TRACEID_FIELD_NUMBER = 1;
private long traceId_ = 0L;
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return Whether the traceId field is set.
*/
@java.lang.Override
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return The traceId.
*/
@java.lang.Override
public long getTraceId() {
return traceId_;
}
public static final int PARENTID_FIELD_NUMBER = 2;
private long parentId_ = 0L;
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return Whether the parentId field is set.
*/
@java.lang.Override
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return The parentId.
*/
@java.lang.Override
public long getParentId() {
return parentId_;
}
public static final int SPANCONTEXT_FIELD_NUMBER = 3;
private org.apache.hadoop.thirdparty.protobuf.ByteString spanContext_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return Whether the spanContext field is set.
*/
@java.lang.Override
public boolean hasSpanContext() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return The spanContext.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getSpanContext() {
return spanContext_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt64(1, traceId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt64(2, parentId_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeBytes(3, spanContext_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(1, traceId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(2, parentId_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(3, spanContext_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) obj;
if (hasTraceId() != other.hasTraceId()) return false;
if (hasTraceId()) {
if (getTraceId()
!= other.getTraceId()) return false;
}
if (hasParentId() != other.hasParentId()) return false;
if (hasParentId()) {
if (getParentId()
!= other.getParentId()) return false;
}
if (hasSpanContext() != other.hasSpanContext()) return false;
if (hasSpanContext()) {
if (!getSpanContext()
.equals(other.getSpanContext())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTraceId()) {
hash = (37 * hash) + TRACEID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getTraceId());
}
if (hasParentId()) {
hash = (37 * hash) + PARENTID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getParentId());
}
if (hasSpanContext()) {
hash = (37 * hash) + SPANCONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getSpanContext().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
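// Round-trip sketch (illustrative, not generated code): toByteArray() pairs
// with the parseFrom overloads above; parsing malformed bytes raises
// InvalidProtocolBufferException.
private static RPCTraceInfoProto roundTrip(RPCTraceInfoProto msg)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return parseFrom(msg.toByteArray());
}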
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Used to pass through the information necessary to continue
 * a trace after an RPC is made. All we need is the traceid
 * (so we know the overarching trace this message is a part of), and
 * the id of the current span when this message was sent, so we know
 * what span caused the new span we will create when this message is received.
 *
 * Protobuf type {@code hadoop.common.RPCTraceInfoProto}
 */
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.RPCTraceInfoProto)
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
traceId_ = 0L;
parentId_ = 0L;
spanContext_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.traceId_ = traceId_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.parentId_ = parentId_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.spanContext_ = spanContext_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) return this;
if (other.hasTraceId()) {
setTraceId(other.getTraceId());
}
if (other.hasParentId()) {
setParentId(other.getParentId());
}
if (other.hasSpanContext()) {
setSpanContext(other.getSpanContext());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
traceId_ = input.readInt64();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16: {
parentId_ = input.readInt64();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26: {
spanContext_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private long traceId_ ;
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return Whether the traceId field is set.
*/
@java.lang.Override
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return The traceId.
*/
@java.lang.Override
public long getTraceId() {
return traceId_;
}
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @param value The traceId to set.
* @return This builder for chaining.
*/
public Builder setTraceId(long value) {
traceId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
* parentIdHigh
*
*
* optional int64 traceId = 1;
* @return This builder for chaining.
*/
public Builder clearTraceId() {
bitField0_ = (bitField0_ & ~0x00000001);
traceId_ = 0L;
onChanged();
return this;
}
private long parentId_ ;
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return Whether the parentId field is set.
*/
@java.lang.Override
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return The parentId.
*/
@java.lang.Override
public long getParentId() {
return parentId_;
}
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @param value The parentId to set.
* @return This builder for chaining.
*/
public Builder setParentId(long value) {
parentId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
* parentIdLow
*
*
* optional int64 parentId = 2;
* @return This builder for chaining.
*/
public Builder clearParentId() {
bitField0_ = (bitField0_ & ~0x00000002);
parentId_ = 0L;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString spanContext_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return Whether the spanContext field is set.
*/
@java.lang.Override
public boolean hasSpanContext() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return The spanContext.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getSpanContext() {
return spanContext_;
}
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @param value The spanContext to set.
* @return This builder for chaining.
*/
public Builder setSpanContext(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
spanContext_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
* Trace SpanContext
*
*
* optional bytes spanContext = 3;
* @return This builder for chaining.
*/
public Builder clearSpanContext() {
bitField0_ = (bitField0_ & ~0x00000004);
spanContext_ = getDefaultInstance().getSpanContext();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RPCTraceInfoProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.RPCTraceInfoProto)
private static final org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto();
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RPCTraceInfoProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RPCTraceInfoProto>() {
@java.lang.Override
public RPCTraceInfoProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RPCTraceInfoProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RPCTraceInfoProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
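// Builder sketch (illustrative, not generated code): all three fields are
// optional, so only the ones set here are written to the wire; after build()
// hasTraceId() and hasParentId() are true while hasSpanContext() is false.
private static RPCTraceInfoProto exampleTraceInfo() {
return RPCTraceInfoProto.newBuilder()
.setTraceId(1L)   // labeled parentIdHigh in the field comments above
.setParentId(2L)  // labeled parentIdLow in the field comments above
.build();
}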
public interface RPCCallerContextProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.RPCCallerContextProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string context = 1;
* @return Whether the context field is set.
*/
boolean hasContext();
/**
* required string context = 1;
* @return The context.
*/
java.lang.String getContext();
/**
* required string context = 1;
* @return The bytes for context.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getContextBytes();
/**
* optional bytes signature = 2;
* @return Whether the signature field is set.
*/
boolean hasSignature();
/**
* optional bytes signature = 2;
* @return The signature.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getSignature();
}
/**
 * Used to pass through the call context entry after an RPC is made.
 *
 * Protobuf type {@code hadoop.common.RPCCallerContextProto}
 */
public static final class RPCCallerContextProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.RPCCallerContextProto)
RPCCallerContextProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RPCCallerContextProto.newBuilder() to construct.
private RPCCallerContextProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RPCCallerContextProto() {
context_ = "";
signature_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RPCCallerContextProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder.class);
}
private int bitField0_;
public static final int CONTEXT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object context_ = "";
/**
* required string context = 1;
* @return Whether the context field is set.
*/
@java.lang.Override
public boolean hasContext() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string context = 1;
* @return The context.
*/
@java.lang.Override
public java.lang.String getContext() {
java.lang.Object ref = context_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
context_ = s;
}
return s;
}
}
/**
* required string context = 1;
* @return The bytes for context.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getContextBytes() {
java.lang.Object ref = context_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
context_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int SIGNATURE_FIELD_NUMBER = 2;
private org.apache.hadoop.thirdparty.protobuf.ByteString signature_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes signature = 2;
* @return Whether the signature field is set.
*/
@java.lang.Override
public boolean hasSignature() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes signature = 2;
* @return The signature.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getSignature() {
return signature_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasContext()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, context_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeBytes(2, signature_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, context_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(2, signature_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) obj;
if (hasContext() != other.hasContext()) return false;
if (hasContext()) {
if (!getContext()
.equals(other.getContext())) return false;
}
if (hasSignature() != other.hasSignature()) return false;
if (hasSignature()) {
if (!getSignature()
.equals(other.getSignature())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasContext()) {
hash = (37 * hash) + CONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getContext().hashCode();
}
if (hasSignature()) {
hash = (37 * hash) + SIGNATURE_FIELD_NUMBER;
hash = (53 * hash) + getSignature().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Used to pass through the call context entry after an RPC is made.
 *
 * Protobuf type {@code hadoop.common.RPCCallerContextProto}
 */
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.RPCCallerContextProto)
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
context_ = "";
signature_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.context_ = context_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.signature_ = signature_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance()) return this;
if (other.hasContext()) {
context_ = other.context_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasSignature()) {
setSignature(other.getSignature());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasContext()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
context_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
signature_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object context_ = "";
/**
* required string context = 1;
* @return Whether the context field is set.
*/
public boolean hasContext() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string context = 1;
* @return The context.
*/
public java.lang.String getContext() {
java.lang.Object ref = context_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
context_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string context = 1;
* @return The bytes for context.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getContextBytes() {
java.lang.Object ref = context_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
context_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string context = 1;
* @param value The context to set.
* @return This builder for chaining.
*/
public Builder setContext(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
context_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string context = 1;
* @return This builder for chaining.
*/
public Builder clearContext() {
context_ = getDefaultInstance().getContext();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string context = 1;
* @param value The bytes for context to set.
* @return This builder for chaining.
*/
public Builder setContextBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
context_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString signature_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes signature = 2;
* @return Whether the signature field is set.
*/
@java.lang.Override
public boolean hasSignature() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes signature = 2;
* @return The signature.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getSignature() {
return signature_;
}
/**
* optional bytes signature = 2;
* @param value The signature to set.
* @return This builder for chaining.
*/
public Builder setSignature(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
signature_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional bytes signature = 2;
* @return This builder for chaining.
*/
public Builder clearSignature() {
bitField0_ = (bitField0_ & ~0x00000002);
signature_ = getDefaultInstance().getSignature();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RPCCallerContextProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.RPCCallerContextProto)
private static final org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto();
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RPCCallerContextProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RPCCallerContextProto>() {
@java.lang.Override
public RPCCallerContextProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RPCCallerContextProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RPCCallerContextProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
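// Required-field sketch (illustrative, not generated code): context is a
// required proto2 field, so build() throws (via newUninitializedMessageException)
// when it is unset, while buildPartial() skips that check. The string below is
// a hypothetical caller context value.
private static RPCCallerContextProto exampleCallerContext() {
return RPCCallerContextProto.newBuilder()
.setContext("exampleCallerContext")
.build();
}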
public interface RpcRequestHeaderProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.RpcRequestHeaderProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return Whether the rpcKind field is set.
*/
boolean hasRpcKind();
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return The rpcKind.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind();
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return Whether the rpcOp field is set.
*/
boolean hasRpcOp();
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return The rpcOp.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp();
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return Whether the callId field is set.
*/
boolean hasCallId();
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return The callId.
*/
int getCallId();
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return Whether the clientId field is set.
*/
boolean hasClientId();
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return The clientId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getClientId();
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return Whether the retryCount field is set.
*/
boolean hasRetryCount();
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return The retryCount.
*/
int getRetryCount();
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
* @return Whether the traceInfo field is set.
*/
boolean hasTraceInfo();
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
* @return The traceInfo.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo();
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder();
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
* @return Whether the callerContext field is set.
*/
boolean hasCallerContext();
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
* @return The callerContext.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext();
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder();
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return Whether the stateId field is set.
*/
boolean hasStateId();
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return The stateId.
*/
long getStateId();
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return Whether the routerFederatedState field is set.
*/
boolean hasRouterFederatedState();
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return The routerFederatedState.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getRouterFederatedState();
}
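// Retry sketch (illustrative, not generated code): retryCount defaults to -1
// and, per the field comment, 1 marks the first retry, so a positive value
// identifies a retried request.
private static boolean isRetriedRequest(RpcRequestHeaderProtoOrBuilder header) {
return header.hasRetryCount() && header.getRetryCount() > 0;
}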
/**
 * The header for the RpcRequest.
 *
 * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
 */
public static final class RpcRequestHeaderProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.RpcRequestHeaderProto)
RpcRequestHeaderProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RpcRequestHeaderProto.newBuilder() to construct.
private RpcRequestHeaderProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RpcRequestHeaderProto() {
rpcKind_ = 0;
rpcOp_ = 0;
clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
retryCount_ = -1;
routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RpcRequestHeaderProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
}
/**
* Protobuf enum {@code hadoop.common.RpcRequestHeaderProto.OperationProto}
*/
public enum OperationProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
*
* The final RPC Packet
*
*
* RPC_FINAL_PACKET = 0;
*/
RPC_FINAL_PACKET(0),
/**
*
* not implemented yet
*
*
* RPC_CONTINUATION_PACKET = 1;
*/
RPC_CONTINUATION_PACKET(1),
/**
*
* close the rpc connection
*
*
* RPC_CLOSE_CONNECTION = 2;
*/
RPC_CLOSE_CONNECTION(2),
;
/**
*
* The final RPC Packet
*
*
* RPC_FINAL_PACKET = 0;
*/
public static final int RPC_FINAL_PACKET_VALUE = 0;
/**
*
* not implemented yet
*
*
* RPC_CONTINUATION_PACKET = 1;
*/
public static final int RPC_CONTINUATION_PACKET_VALUE = 1;
/**
*
* close the rpc connection
*
*
* RPC_CLOSE_CONNECTION = 2;
*/
public static final int RPC_CLOSE_CONNECTION_VALUE = 2;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static OperationProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static OperationProto forNumber(int value) {
switch (value) {
case 0: return RPC_FINAL_PACKET;
case 1: return RPC_CONTINUATION_PACKET;
case 2: return RPC_CLOSE_CONNECTION;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<OperationProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
OperationProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<OperationProto>() {
public OperationProto findValueByNumber(int number) {
return OperationProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDescriptor().getEnumTypes().get(0);
}
private static final OperationProto[] VALUES = values();
public static OperationProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private OperationProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcRequestHeaderProto.OperationProto)
}
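// Construction sketch (illustrative, not generated code; the setters are
// assumed to follow the standard protoc builder pattern seen in the messages
// above): callId and clientId are required, and together they uniquely
// identify a request per the field comments below.
private static RpcRequestHeaderProto exampleHeader(
int callId, org.apache.hadoop.thirdparty.protobuf.ByteString clientId) {
return newBuilder()
.setRpcKind(RpcKindProto.RPC_PROTOCOL_BUFFER)
.setRpcOp(OperationProto.RPC_FINAL_PACKET)
.setCallId(callId)
.setClientId(clientId)
.build();
}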
private int bitField0_;
public static final int RPCKIND_FIELD_NUMBER = 1;
private int rpcKind_ = 0;
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return Whether the rpcKind field is set.
*/
@java.lang.Override public boolean hasRpcKind() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return The rpcKind.
*/
@java.lang.Override public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.forNumber(rpcKind_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN : result;
}
public static final int RPCOP_FIELD_NUMBER = 2;
private int rpcOp_ = 0;
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return Whether the rpcOp field is set.
*/
@java.lang.Override public boolean hasRpcOp() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return The rpcOp.
*/
@java.lang.Override public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.forNumber(rpcOp_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET : result;
}
public static final int CALLID_FIELD_NUMBER = 3;
private int callId_ = 0;
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return Whether the callId field is set.
*/
@java.lang.Override
public boolean hasCallId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return The callId.
*/
@java.lang.Override
public int getCallId() {
return callId_;
}
public static final int CLIENTID_FIELD_NUMBER = 4;
private org.apache.hadoop.thirdparty.protobuf.ByteString clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return Whether the clientId field is set.
*/
@java.lang.Override
public boolean hasClientId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return The clientId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getClientId() {
return clientId_;
}
public static final int RETRYCOUNT_FIELD_NUMBER = 5;
private int retryCount_ = -1;
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return Whether the retryCount field is set.
*/
@java.lang.Override
public boolean hasRetryCount() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return The retryCount.
*/
@java.lang.Override
public int getRetryCount() {
return retryCount_;
}
public static final int TRACEINFO_FIELD_NUMBER = 6;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_;
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
* @return Whether the traceInfo field is set.
*/
@java.lang.Override
public boolean hasTraceInfo() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
* @return The traceInfo.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
return traceInfo_ == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance() : traceInfo_;
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
return traceInfo_ == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance() : traceInfo_;
}
public static final int CALLERCONTEXT_FIELD_NUMBER = 7;
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto callerContext_;
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
* @return Whether the callerContext field is set.
*/
@java.lang.Override
public boolean hasCallerContext() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
* @return The callerContext.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext() {
return callerContext_ == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance() : callerContext_;
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder() {
return callerContext_ == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance() : callerContext_;
}
public static final int STATEID_FIELD_NUMBER = 8;
private long stateId_ = 0L;
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return Whether the stateId field is set.
*/
@java.lang.Override
public boolean hasStateId() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return The stateId.
*/
@java.lang.Override
public long getStateId() {
return stateId_;
}
public static final int ROUTERFEDERATEDSTATE_FIELD_NUMBER = 9;
private org.apache.hadoop.thirdparty.protobuf.ByteString routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return Whether the routerFederatedState field is set.
*/
@java.lang.Override
public boolean hasRouterFederatedState() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return The routerFederatedState.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getRouterFederatedState() {
return routerFederatedState_;
}
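// Illustrative sketch, not protoc output: per the comment above, a client
// treats routerFederatedState as opaque and simply echoes the bytes from the
// previous response into the next request (requestBuilder and lastResponse
// are hypothetical names):
//
//   requestBuilder.setRouterFederatedState(
//       lastResponse.getRouterFederatedState());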
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasCallId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasClientId()) {
memoizedIsInitialized = 0;
return false;
}
if (hasCallerContext()) {
if (!getCallerContext().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
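// Illustrative sketch, not protoc output: callId and clientId are declared
// required, so isInitialized() returns false until both are set, and
// Builder#build() throws for such a message while buildPartial() does not:
//
//   RpcRequestHeaderProto header = RpcRequestHeaderProto.newBuilder()
//       .setCallId(1)
//       .setClientId(clientIdBytes)  // clientIdBytes: a hypothetical ByteString
//       .build();                    // would throw if a required field were missing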
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, rpcKind_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, rpcOp_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeSInt32(3, callId_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeBytes(4, clientId_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeSInt32(5, retryCount_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeMessage(6, getTraceInfo());
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeMessage(7, getCallerContext());
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeInt64(8, stateId_);
}
if (((bitField0_ & 0x00000100) != 0)) {
output.writeBytes(9, routerFederatedState_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(1, rpcKind_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, rpcOp_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeSInt32Size(3, callId_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(4, clientId_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeSInt32Size(5, retryCount_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(6, getTraceInfo());
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(7, getCallerContext());
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(8, stateId_);
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(9, routerFederatedState_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
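// Illustrative note, not protoc output: callId and retryCount are sint32
// fields, so they are ZigZag-encoded before the varint is written; the
// default retryCount of -1 therefore costs one byte instead of ten. The
// 32-bit mapping is:
//
//   int zigzag = (n << 1) ^ (n >> 31);  // -1 -> 1, 0 -> 0, 1 -> 2, -2 -> 3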
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) obj;
if (hasRpcKind() != other.hasRpcKind()) return false;
if (hasRpcKind()) {
if (rpcKind_ != other.rpcKind_) return false;
}
if (hasRpcOp() != other.hasRpcOp()) return false;
if (hasRpcOp()) {
if (rpcOp_ != other.rpcOp_) return false;
}
if (hasCallId() != other.hasCallId()) return false;
if (hasCallId()) {
if (getCallId()
!= other.getCallId()) return false;
}
if (hasClientId() != other.hasClientId()) return false;
if (hasClientId()) {
if (!getClientId()
.equals(other.getClientId())) return false;
}
if (hasRetryCount() != other.hasRetryCount()) return false;
if (hasRetryCount()) {
if (getRetryCount()
!= other.getRetryCount()) return false;
}
if (hasTraceInfo() != other.hasTraceInfo()) return false;
if (hasTraceInfo()) {
if (!getTraceInfo()
.equals(other.getTraceInfo())) return false;
}
if (hasCallerContext() != other.hasCallerContext()) return false;
if (hasCallerContext()) {
if (!getCallerContext()
.equals(other.getCallerContext())) return false;
}
if (hasStateId() != other.hasStateId()) return false;
if (hasStateId()) {
if (getStateId()
!= other.getStateId()) return false;
}
if (hasRouterFederatedState() != other.hasRouterFederatedState()) return false;
if (hasRouterFederatedState()) {
if (!getRouterFederatedState()
.equals(other.getRouterFederatedState())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRpcKind()) {
hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
hash = (53 * hash) + rpcKind_;
}
if (hasRpcOp()) {
hash = (37 * hash) + RPCOP_FIELD_NUMBER;
hash = (53 * hash) + rpcOp_;
}
if (hasCallId()) {
hash = (37 * hash) + CALLID_FIELD_NUMBER;
hash = (53 * hash) + getCallId();
}
if (hasClientId()) {
hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
hash = (53 * hash) + getClientId().hashCode();
}
if (hasRetryCount()) {
hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
hash = (53 * hash) + getRetryCount();
}
if (hasTraceInfo()) {
hash = (37 * hash) + TRACEINFO_FIELD_NUMBER;
hash = (53 * hash) + getTraceInfo().hashCode();
}
if (hasCallerContext()) {
hash = (37 * hash) + CALLERCONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getCallerContext().hashCode();
}
if (hasStateId()) {
hash = (37 * hash) + STATEID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getStateId());
}
if (hasRouterFederatedState()) {
hash = (37 * hash) + ROUTERFEDERATEDSTATE_FIELD_NUMBER;
hash = (53 * hash) + getRouterFederatedState().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
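// Illustrative round trip, not protoc output (out and in are hypothetical
// streams): writeDelimitedTo prefixes the serialized bytes with a varint
// length, which parseDelimitedFrom above consumes symmetrically:
//
//   header.writeDelimitedTo(out);
//   RpcRequestHeaderProto copy = RpcRequestHeaderProto.parseDelimitedFrom(in);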
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* the header for the RpcRequest
*
*
* Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.RpcRequestHeaderProto)
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTraceInfoFieldBuilder();
getCallerContextFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
rpcKind_ = 0;
rpcOp_ = 0;
callId_ = 0;
clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
retryCount_ = -1;
traceInfo_ = null;
if (traceInfoBuilder_ != null) {
traceInfoBuilder_.dispose();
traceInfoBuilder_ = null;
}
callerContext_ = null;
if (callerContextBuilder_ != null) {
callerContextBuilder_.dispose();
callerContextBuilder_ = null;
}
stateId_ = 0L;
routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.rpcKind_ = rpcKind_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.rpcOp_ = rpcOp_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.callId_ = callId_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.clientId_ = clientId_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.retryCount_ = retryCount_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.traceInfo_ = traceInfoBuilder_ == null
? traceInfo_
: traceInfoBuilder_.build();
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.callerContext_ = callerContextBuilder_ == null
? callerContext_
: callerContextBuilder_.build();
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.stateId_ = stateId_;
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.routerFederatedState_ = routerFederatedState_;
to_bitField0_ |= 0x00000100;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance()) return this;
if (other.hasRpcKind()) {
setRpcKind(other.getRpcKind());
}
if (other.hasRpcOp()) {
setRpcOp(other.getRpcOp());
}
if (other.hasCallId()) {
setCallId(other.getCallId());
}
if (other.hasClientId()) {
setClientId(other.getClientId());
}
if (other.hasRetryCount()) {
setRetryCount(other.getRetryCount());
}
if (other.hasTraceInfo()) {
mergeTraceInfo(other.getTraceInfo());
}
if (other.hasCallerContext()) {
mergeCallerContext(other.getCallerContext());
}
if (other.hasStateId()) {
setStateId(other.getStateId());
}
if (other.hasRouterFederatedState()) {
setRouterFederatedState(other.getRouterFederatedState());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasCallId()) {
return false;
}
if (!hasClientId()) {
return false;
}
if (hasCallerContext()) {
if (!getCallerContext().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int tmpRaw = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto tmpValue =
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(1, tmpRaw);
} else {
rpcKind_ = tmpRaw;
bitField0_ |= 0x00000001;
}
break;
} // case 8
case 16: {
int tmpRaw = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto tmpValue =
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
rpcOp_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
case 24: {
callId_ = input.readSInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34: {
clientId_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
case 40: {
retryCount_ = input.readSInt32();
bitField0_ |= 0x00000010;
break;
} // case 40
case 50: {
input.readMessage(
getTraceInfoFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000020;
break;
} // case 50
case 58: {
input.readMessage(
getCallerContextFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000040;
break;
} // case 58
case 64: {
stateId_ = input.readInt64();
bitField0_ |= 0x00000080;
break;
} // case 64
case 74: {
routerFederatedState_ = input.readBytes();
bitField0_ |= 0x00000100;
break;
} // case 74
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
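// Illustrative note, not protoc output: each case label above is a protobuf
// tag, computed as (fieldNumber << 3) | wireType. So case 8 is field 1 as a
// varint (1 << 3 | 0), case 34 is field 4 as length-delimited bytes
// (4 << 3 | 2), and case 74 is field 9 as length-delimited bytes (9 << 3 | 2):
//
//   int tag = (fieldNumber << 3) | wireType;  // e.g. (3 << 3) | 0 == 24 for callId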
private int bitField0_;
private int rpcKind_ = 0;
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return Whether the rpcKind field is set.
*/
@java.lang.Override public boolean hasRpcKind() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return The rpcKind.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.forNumber(rpcKind_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN : result;
}
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @param value The rpcKind to set.
* @return This builder for chaining.
*/
public Builder setRpcKind(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
rpcKind_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.common.RpcKindProto rpcKind = 1;
* @return This builder for chaining.
*/
public Builder clearRpcKind() {
bitField0_ = (bitField0_ & ~0x00000001);
rpcKind_ = 0;
onChanged();
return this;
}
private int rpcOp_ = 0;
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return Whether the rpcOp field is set.
*/
@java.lang.Override public boolean hasRpcOp() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return The rpcOp.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.forNumber(rpcOp_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET : result;
}
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @param value The rpcOp to set.
* @return This builder for chaining.
*/
public Builder setRpcOp(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
rpcOp_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
* @return This builder for chaining.
*/
public Builder clearRpcOp() {
bitField0_ = (bitField0_ & ~0x00000002);
rpcOp_ = 0;
onChanged();
return this;
}
private int callId_ ;
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return Whether the callId field is set.
*/
@java.lang.Override
public boolean hasCallId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return The callId.
*/
@java.lang.Override
public int getCallId() {
return callId_;
}
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @param value The callId to set.
* @return This builder for chaining.
*/
public Builder setCallId(int value) {
callId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
* a sequence number that is sent back in response
*
*
* required sint32 callId = 3;
* @return This builder for chaining.
*/
public Builder clearCallId() {
bitField0_ = (bitField0_ & ~0x00000004);
callId_ = 0;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return Whether the clientId field is set.
*/
@java.lang.Override
public boolean hasClientId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return The clientId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getClientId() {
return clientId_;
}
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @param value The clientId to set.
* @return This builder for chaining.
*/
public Builder setClientId(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
clientId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
* Globally unique client ID
*
*
* required bytes clientId = 4;
* @return This builder for chaining.
*/
public Builder clearClientId() {
bitField0_ = (bitField0_ & ~0x00000008);
clientId_ = getDefaultInstance().getClientId();
onChanged();
return this;
}
private int retryCount_ = -1;
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return Whether the retryCount field is set.
*/
@java.lang.Override
public boolean hasRetryCount() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return The retryCount.
*/
@java.lang.Override
public int getRetryCount() {
return retryCount_;
}
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @param value The retryCount to set.
* @return This builder for chaining.
*/
public Builder setRetryCount(int value) {
retryCount_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
* clientId + callId uniquely identifies a request
* retry count, 1 means this is the first retry
*
*
* optional sint32 retryCount = 5 [default = -1];
* @return This builder for chaining.
*/
public Builder clearRetryCount() {
bitField0_ = (bitField0_ & ~0x00000010);
retryCount_ = -1;
onChanged();
return this;
}
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder> traceInfoBuilder_;
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
* @return Whether the traceInfo field is set.
*/
public boolean hasTraceInfo() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
* @return The traceInfo.
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
if (traceInfoBuilder_ == null) {
return traceInfo_ == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance() : traceInfo_;
} else {
return traceInfoBuilder_.getMessage();
}
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
public Builder setTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
if (traceInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
traceInfo_ = value;
} else {
traceInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
public Builder setTraceInfo(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder builderForValue) {
if (traceInfoBuilder_ == null) {
traceInfo_ = builderForValue.build();
} else {
traceInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
public Builder mergeTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
if (traceInfoBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0) &&
traceInfo_ != null &&
traceInfo_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) {
getTraceInfoBuilder().mergeFrom(value);
} else {
traceInfo_ = value;
}
} else {
traceInfoBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
public Builder clearTraceInfo() {
bitField0_ = (bitField0_ & ~0x00000020);
traceInfo_ = null;
if (traceInfoBuilder_ != null) {
traceInfoBuilder_.dispose();
traceInfoBuilder_ = null;
}
onChanged();
return this;
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder getTraceInfoBuilder() {
bitField0_ |= 0x00000020;
onChanged();
return getTraceInfoFieldBuilder().getBuilder();
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
if (traceInfoBuilder_ != null) {
return traceInfoBuilder_.getMessageOrBuilder();
} else {
return traceInfo_ == null ?
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance() : traceInfo_;
}
}
/**
*
* tracing info
*
*
* optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder>
getTraceInfoFieldBuilder() {
if (traceInfoBuilder_ == null) {
traceInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder>(
getTraceInfo(),
getParentForChildren(),
isClean());
traceInfo_ = null;
}
return traceInfoBuilder_;
}
private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto callerContext_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder> callerContextBuilder_;
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
* @return Whether the callerContext field is set.
*/
public boolean hasCallerContext() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
* @return The callerContext.
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext() {
if (callerContextBuilder_ == null) {
return callerContext_ == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance() : callerContext_;
} else {
return callerContextBuilder_.getMessage();
}
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
public Builder setCallerContext(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto value) {
if (callerContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
callerContext_ = value;
} else {
callerContextBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
public Builder setCallerContext(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder builderForValue) {
if (callerContextBuilder_ == null) {
callerContext_ = builderForValue.build();
} else {
callerContextBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
public Builder mergeCallerContext(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto value) {
if (callerContextBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0) &&
callerContext_ != null &&
callerContext_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance()) {
getCallerContextBuilder().mergeFrom(value);
} else {
callerContext_ = value;
}
} else {
callerContextBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
public Builder clearCallerContext() {
bitField0_ = (bitField0_ & ~0x00000040);
callerContext_ = null;
if (callerContextBuilder_ != null) {
callerContextBuilder_.dispose();
callerContextBuilder_ = null;
}
onChanged();
return this;
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder getCallerContextBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getCallerContextFieldBuilder().getBuilder();
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder() {
if (callerContextBuilder_ != null) {
return callerContextBuilder_.getMessageOrBuilder();
} else {
return callerContext_ == null ?
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance() : callerContext_;
}
}
/**
*
* call context
*
*
* optional .hadoop.common.RPCCallerContextProto callerContext = 7;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder>
getCallerContextFieldBuilder() {
if (callerContextBuilder_ == null) {
callerContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder>(
getCallerContext(),
getParentForChildren(),
isClean());
callerContext_ = null;
}
return callerContextBuilder_;
}
private long stateId_ ;
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return Whether the stateId field is set.
*/
@java.lang.Override
public boolean hasStateId() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return The stateId.
*/
@java.lang.Override
public long getStateId() {
return stateId_;
}
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @param value The stateId to set.
* @return This builder for chaining.
*/
public Builder setStateId(long value) {
stateId_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
* The last seen Global State ID
*
*
* optional int64 stateId = 8;
* @return This builder for chaining.
*/
public Builder clearStateId() {
bitField0_ = (bitField0_ & ~0x00000080);
stateId_ = 0L;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return Whether the routerFederatedState field is set.
*/
@java.lang.Override
public boolean hasRouterFederatedState() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return The routerFederatedState.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getRouterFederatedState() {
return routerFederatedState_;
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @param value The routerFederatedState to set.
* @return This builder for chaining.
*/
public Builder setRouterFederatedState(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
routerFederatedState_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only forward bytes
* received from RpcResponseHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 9;
* @return This builder for chaining.
*/
public Builder clearRouterFederatedState() {
bitField0_ = (bitField0_ & ~0x00000100);
routerFederatedState_ = getDefaultInstance().getRouterFederatedState();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcRequestHeaderProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcRequestHeaderProto)
private static final org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto();
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RpcRequestHeaderProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RpcRequestHeaderProto>() {
@java.lang.Override
public RpcRequestHeaderProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RpcRequestHeaderProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RpcRequestHeaderProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
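// Illustrative construction sketch for the message above, not protoc output
// (uuidBytes is a hypothetical 16-byte client identifier):
//
//   RpcRequestHeaderProto header = RpcRequestHeaderProto.newBuilder()
//       .setRpcKind(RpcKindProto.RPC_PROTOCOL_BUFFER)
//       .setRpcOp(RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET)
//       .setCallId(0)
//       .setClientId(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFrom(uuidBytes))
//       .setRetryCount(-1)
//       .build();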
public interface RpcResponseHeaderProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.RpcResponseHeaderProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return Whether the callId field is set.
*/
boolean hasCallId();
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return The callId.
*/
int getCallId();
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return Whether the status field is set.
*/
boolean hasStatus();
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return The status.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus();
/**
*
* Sent on success or failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return Whether the serverIpcVersionNum field is set.
*/
boolean hasServerIpcVersionNum();
/**
*
* Sent on success or failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return The serverIpcVersionNum.
*/
int getServerIpcVersionNum();
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return Whether the exceptionClassName field is set.
*/
boolean hasExceptionClassName();
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return The exceptionClassName.
*/
java.lang.String getExceptionClassName();
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return The bytes for exceptionClassName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getExceptionClassNameBytes();
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return Whether the errorMsg field is set.
*/
boolean hasErrorMsg();
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return The errorMsg.
*/
java.lang.String getErrorMsg();
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return The bytes for errorMsg.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getErrorMsgBytes();
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return Whether the errorDetail field is set.
*/
boolean hasErrorDetail();
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return The errorDetail.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail();
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return Whether the clientId field is set.
*/
boolean hasClientId();
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return The clientId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getClientId();
/**
* optional sint32 retryCount = 8 [default = -1];
* @return Whether the retryCount field is set.
*/
boolean hasRetryCount();
/**
* optional sint32 retryCount = 8 [default = -1];
* @return The retryCount.
*/
int getRetryCount();
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return Whether the stateId field is set.
*/
boolean hasStateId();
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return The stateId.
*/
long getStateId();
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return Whether the routerFederatedState field is set.
*/
boolean hasRouterFederatedState();
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return The routerFederatedState.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getRouterFederatedState();
}
/**
*
**
* Rpc Response Header
* +------------------------------------------------------------------+
* | Rpc total response length in bytes (4 bytes int) |
* | (sum of next two parts) |
* +------------------------------------------------------------------+
* | RpcResponseHeaderProto - serialized delimited i.e. has length |
* +------------------------------------------------------------------+
* | if request is successful: |
* | - RpcResponse - The actual rpc response bytes follow |
* | the response header |
* | This response is serialized based on RpcKindProto |
* | if request fails : |
* | The rpc response header contains the necessary info |
* +------------------------------------------------------------------+
*
* Note that the rpc response header is also used when connection setup fails,
* i.e. the response looks like an rpc response with a fake callId.
*
*
* Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
*/
public static final class RpcResponseHeaderProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.RpcResponseHeaderProto)
RpcResponseHeaderProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RpcResponseHeaderProto.newBuilder() to construct.
private RpcResponseHeaderProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RpcResponseHeaderProto() {
status_ = 0;
exceptionClassName_ = "";
errorMsg_ = "";
errorDetail_ = 1;
clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
retryCount_ = -1;
routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RpcResponseHeaderProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
}
/**
* Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcStatusProto}
*/
public enum RpcStatusProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
*
* RPC succeeded
*
*
* SUCCESS = 0;
*/
SUCCESS(0),
/**
*
* RPC error - connection left open for future calls
*
*
* ERROR = 1;
*/
ERROR(1),
/**
*
* Fatal error - connection closed
*
*
* FATAL = 2;
*/
FATAL(2),
;
/**
*
* RPC succeeded
*
*
* SUCCESS = 0;
*/
public static final int SUCCESS_VALUE = 0;
/**
*
* RPC error - connection left open for future calls
*
*
* ERROR = 1;
*/
public static final int ERROR_VALUE = 1;
/**
*
* Fatal error - connection closed
*
*
* FATAL = 2;
*/
public static final int FATAL_VALUE = 2;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static RpcStatusProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static RpcStatusProto forNumber(int value) {
switch (value) {
case 0: return SUCCESS;
case 1: return ERROR;
case 2: return FATAL;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RpcStatusProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
RpcStatusProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RpcStatusProto>() {
public RpcStatusProto findValueByNumber(int number) {
return RpcStatusProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(0);
}
private static final RpcStatusProto[] VALUES = values();
public static RpcStatusProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private RpcStatusProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcStatusProto)
}
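// Illustrative handling sketch, not protoc output, following the value
// comments above: ERROR leaves the connection open while FATAL closes it:
//
//   switch (status) {
//     case SUCCESS: /* deliver the response */               break;
//     case ERROR:   /* surface the error, keep the socket */ break;
//     case FATAL:   /* surface the error, close the socket */ break;
//   }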
/**
* Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto}
*/
public enum RpcErrorCodeProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
*
* Non-fatal Rpc error - connection left open for future rpc calls
*
*
* ERROR_APPLICATION = 1;
*/
ERROR_APPLICATION(1),
/**
*
* Rpc error - no such method
*
*
* ERROR_NO_SUCH_METHOD = 2;
*/
ERROR_NO_SUCH_METHOD(2),
/**
*
* Rpc error - no such protocol
*
*
* ERROR_NO_SUCH_PROTOCOL = 3;
*/
ERROR_NO_SUCH_PROTOCOL(3),
/**
*
* Rpc error on server side
*
*
* ERROR_RPC_SERVER = 4;
*/
ERROR_RPC_SERVER(4),
/**
*
* error serializing response
*
*
* ERROR_SERIALIZING_RESPONSE = 5;
*/
ERROR_SERIALIZING_RESPONSE(5),
/**
*
* Rpc protocol version mismatch
*
*
* ERROR_RPC_VERSION_MISMATCH = 6;
*/
ERROR_RPC_VERSION_MISMATCH(6),
/**
*
* Fatal Server side Rpc error - connection closed
*
*
* FATAL_UNKNOWN = 10;
*/
FATAL_UNKNOWN(10),
/**
*
* IPC layer serialization type invalid
*
*
* FATAL_UNSUPPORTED_SERIALIZATION = 11;
*/
FATAL_UNSUPPORTED_SERIALIZATION(11),
/**
*
* fields of RpcHeader are invalid
*
*
* FATAL_INVALID_RPC_HEADER = 12;
*/
FATAL_INVALID_RPC_HEADER(12),
/**
*
* could not deserialize rpc request
*
*
* FATAL_DESERIALIZING_REQUEST = 13;
*/
FATAL_DESERIALIZING_REQUEST(13),
/**
*
* IPC layer version mismatch
*
*
* FATAL_VERSION_MISMATCH = 14;
*/
FATAL_VERSION_MISMATCH(14),
/**
*
* Auth failed
*
*
* FATAL_UNAUTHORIZED = 15;
*/
FATAL_UNAUTHORIZED(15),
;
/**
*
* Non-fatal Rpc error - connection left open for future rpc calls
*
*
* ERROR_APPLICATION = 1;
*/
public static final int ERROR_APPLICATION_VALUE = 1;
/**
*
* Rpc error - no such method
*
*
* ERROR_NO_SUCH_METHOD = 2;
*/
public static final int ERROR_NO_SUCH_METHOD_VALUE = 2;
/**
*
* Rpc error - no such protocol
*
*
* ERROR_NO_SUCH_PROTOCOL = 3;
*/
public static final int ERROR_NO_SUCH_PROTOCOL_VALUE = 3;
/**
*
* Rpc error on server side
*
*
* ERROR_RPC_SERVER = 4;
*/
public static final int ERROR_RPC_SERVER_VALUE = 4;
/**
*
* error serializing response
*
*
* ERROR_SERIALIZING_RESPONSE = 5;
*/
public static final int ERROR_SERIALIZING_RESPONSE_VALUE = 5;
/**
*
* Rpc protocol version mismatch
*
*
* ERROR_RPC_VERSION_MISMATCH = 6;
*/
public static final int ERROR_RPC_VERSION_MISMATCH_VALUE = 6;
/**
*
* Fatal Server side Rpc error - connection closed
*
*
* FATAL_UNKNOWN = 10;
*/
public static final int FATAL_UNKNOWN_VALUE = 10;
/**
*
* IPC layer serialization type invalid
*
*
* FATAL_UNSUPPORTED_SERIALIZATION = 11;
*/
public static final int FATAL_UNSUPPORTED_SERIALIZATION_VALUE = 11;
/**
*
* fields of RpcHeader are invalid
*
*
* FATAL_INVALID_RPC_HEADER = 12;
*/
public static final int FATAL_INVALID_RPC_HEADER_VALUE = 12;
/**
*
* could not deserialize rpc request
*
*
* FATAL_DESERIALIZING_REQUEST = 13;
*/
public static final int FATAL_DESERIALIZING_REQUEST_VALUE = 13;
/**
*
* Ipc Layer version mismatch
*
*
* FATAL_VERSION_MISMATCH = 14;
*/
public static final int FATAL_VERSION_MISMATCH_VALUE = 14;
/**
*
* Auth failed
*
*
* FATAL_UNAUTHORIZED = 15;
*/
public static final int FATAL_UNAUTHORIZED_VALUE = 15;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static RpcErrorCodeProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static RpcErrorCodeProto forNumber(int value) {
switch (value) {
case 1: return ERROR_APPLICATION;
case 2: return ERROR_NO_SUCH_METHOD;
case 3: return ERROR_NO_SUCH_PROTOCOL;
case 4: return ERROR_RPC_SERVER;
case 5: return ERROR_SERIALIZING_RESPONSE;
case 6: return ERROR_RPC_VERSION_MISMATCH;
case 10: return FATAL_UNKNOWN;
case 11: return FATAL_UNSUPPORTED_SERIALIZATION;
case 12: return FATAL_INVALID_RPC_HEADER;
case 13: return FATAL_DESERIALIZING_REQUEST;
case 14: return FATAL_VERSION_MISMATCH;
case 15: return FATAL_UNAUTHORIZED;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
RpcErrorCodeProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>() {
public RpcErrorCodeProto findValueByNumber(int number) {
return RpcErrorCodeProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(1);
}
private static final RpcErrorCodeProto[] VALUES = values();
public static RpcErrorCodeProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private RpcErrorCodeProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto)
}
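// Usage sketch (illustrative only, not generated code): forNumber(int) returns
// null for wire values outside the known set, so code decoding a header from a
// newer peer should guard against null before use, e.g.:
//
//   RpcErrorCodeProto detail = RpcErrorCodeProto.forNumber(raw);
//   if (detail == null) {
//     detail = RpcErrorCodeProto.FATAL_UNKNOWN; // fallback chosen here purely for illustration
//   }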
private int bitField0_;
public static final int CALLID_FIELD_NUMBER = 1;
private int callId_ = 0;
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return Whether the callId field is set.
*/
@java.lang.Override
public boolean hasCallId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return The callId.
*/
@java.lang.Override
public int getCallId() {
return callId_;
}
public static final int STATUS_FIELD_NUMBER = 2;
private int status_ = 0;
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return Whether the status field is set.
*/
@java.lang.Override public boolean hasStatus() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return The status.
*/
@java.lang.Override public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.forNumber(status_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS : result;
}
public static final int SERVERIPCVERSIONNUM_FIELD_NUMBER = 3;
private int serverIpcVersionNum_ = 0;
/**
*
* Sent on both success and failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return Whether the serverIpcVersionNum field is set.
*/
@java.lang.Override
public boolean hasServerIpcVersionNum() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* Sent on both success and failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return The serverIpcVersionNum.
*/
@java.lang.Override
public int getServerIpcVersionNum() {
return serverIpcVersionNum_;
}
public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object exceptionClassName_ = "";
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return Whether the exceptionClassName field is set.
*/
@java.lang.Override
public boolean hasExceptionClassName() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return The exceptionClassName.
*/
@java.lang.Override
public java.lang.String getExceptionClassName() {
java.lang.Object ref = exceptionClassName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
exceptionClassName_ = s;
}
return s;
}
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return The bytes for exceptionClassName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getExceptionClassNameBytes() {
java.lang.Object ref = exceptionClassName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
exceptionClassName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
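// The two accessors above implement protobuf's lazy string caching:
// exceptionClassName_ holds either a String or a ByteString, and each getter
// converts on demand, caching the converted form (for the String view, only
// when the bytes are valid UTF-8) so repeated calls avoid re-decoding.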
public static final int ERRORMSG_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object errorMsg_ = "";
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return Whether the errorMsg field is set.
*/
@java.lang.Override
public boolean hasErrorMsg() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return The errorMsg.
*/
@java.lang.Override
public java.lang.String getErrorMsg() {
java.lang.Object ref = errorMsg_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
errorMsg_ = s;
}
return s;
}
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return The bytes for errorMsg.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getErrorMsgBytes() {
java.lang.Object ref = errorMsg_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
errorMsg_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int ERRORDETAIL_FIELD_NUMBER = 6;
private int errorDetail_ = 1;
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return Whether the errorDetail field is set.
*/
@java.lang.Override public boolean hasErrorDetail() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return The errorDetail.
*/
@java.lang.Override public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.forNumber(errorDetail_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION : result;
}
public static final int CLIENTID_FIELD_NUMBER = 7;
private org.apache.hadoop.thirdparty.protobuf.ByteString clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return Whether the clientId field is set.
*/
@java.lang.Override
public boolean hasClientId() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return The clientId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getClientId() {
return clientId_;
}
public static final int RETRYCOUNT_FIELD_NUMBER = 8;
private int retryCount_ = -1;
/**
* optional sint32 retryCount = 8 [default = -1];
* @return Whether the retryCount field is set.
*/
@java.lang.Override
public boolean hasRetryCount() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional sint32 retryCount = 8 [default = -1];
* @return The retryCount.
*/
@java.lang.Override
public int getRetryCount() {
return retryCount_;
}
public static final int STATEID_FIELD_NUMBER = 9;
private long stateId_ = 0L;
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return Whether the stateId field is set.
*/
@java.lang.Override
public boolean hasStateId() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return The stateId.
*/
@java.lang.Override
public long getStateId() {
return stateId_;
}
public static final int ROUTERFEDERATEDSTATE_FIELD_NUMBER = 10;
private org.apache.hadoop.thirdparty.protobuf.ByteString routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return Whether the routerFederatedState field is set.
*/
@java.lang.Override
public boolean hasRouterFederatedState() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return The routerFederatedState.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getRouterFederatedState() {
return routerFederatedState_;
}
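// Forwarding sketch (illustrative; the request-side setter is assumed to be the
// protoc-generated setRouterFederatedState on RpcRequestHeaderProto.Builder, as
// implied by the javadoc above): a client copies the bytes verbatim into its
// next request header rather than interpreting them, e.g.:
//
//   if (response.hasRouterFederatedState()) {
//     requestHeaderBuilder.setRouterFederatedState(response.getRouterFederatedState());
//   }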
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasCallId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasStatus()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
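// Note: callId and status are the only required fields; a header missing either
// fails isInitialized(), and Builder.build() surfaces that as an
// uninitialized-message exception (see newUninitializedMessageException below).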
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeUInt32(1, callId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, status_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeUInt32(3, serverIpcVersionNum_);
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, exceptionClassName_);
}
if (((bitField0_ & 0x00000010) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, errorMsg_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeEnum(6, errorDetail_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeBytes(7, clientId_);
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeSInt32(8, retryCount_);
}
if (((bitField0_ & 0x00000100) != 0)) {
output.writeInt64(9, stateId_);
}
if (((bitField0_ & 0x00000200) != 0)) {
output.writeBytes(10, routerFederatedState_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeUInt32Size(1, callId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, status_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeUInt32Size(3, serverIpcVersionNum_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, exceptionClassName_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, errorMsg_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(6, errorDetail_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(7, clientId_);
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeSInt32Size(8, retryCount_);
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(9, stateId_);
}
if (((bitField0_ & 0x00000200) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(10, routerFederatedState_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) obj;
if (hasCallId() != other.hasCallId()) return false;
if (hasCallId()) {
if (getCallId()
!= other.getCallId()) return false;
}
if (hasStatus() != other.hasStatus()) return false;
if (hasStatus()) {
if (status_ != other.status_) return false;
}
if (hasServerIpcVersionNum() != other.hasServerIpcVersionNum()) return false;
if (hasServerIpcVersionNum()) {
if (getServerIpcVersionNum()
!= other.getServerIpcVersionNum()) return false;
}
if (hasExceptionClassName() != other.hasExceptionClassName()) return false;
if (hasExceptionClassName()) {
if (!getExceptionClassName()
.equals(other.getExceptionClassName())) return false;
}
if (hasErrorMsg() != other.hasErrorMsg()) return false;
if (hasErrorMsg()) {
if (!getErrorMsg()
.equals(other.getErrorMsg())) return false;
}
if (hasErrorDetail() != other.hasErrorDetail()) return false;
if (hasErrorDetail()) {
if (errorDetail_ != other.errorDetail_) return false;
}
if (hasClientId() != other.hasClientId()) return false;
if (hasClientId()) {
if (!getClientId()
.equals(other.getClientId())) return false;
}
if (hasRetryCount() != other.hasRetryCount()) return false;
if (hasRetryCount()) {
if (getRetryCount()
!= other.getRetryCount()) return false;
}
if (hasStateId() != other.hasStateId()) return false;
if (hasStateId()) {
if (getStateId()
!= other.getStateId()) return false;
}
if (hasRouterFederatedState() != other.hasRouterFederatedState()) return false;
if (hasRouterFederatedState()) {
if (!getRouterFederatedState()
.equals(other.getRouterFederatedState())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCallId()) {
hash = (37 * hash) + CALLID_FIELD_NUMBER;
hash = (53 * hash) + getCallId();
}
if (hasStatus()) {
hash = (37 * hash) + STATUS_FIELD_NUMBER;
hash = (53 * hash) + status_;
}
if (hasServerIpcVersionNum()) {
hash = (37 * hash) + SERVERIPCVERSIONNUM_FIELD_NUMBER;
hash = (53 * hash) + getServerIpcVersionNum();
}
if (hasExceptionClassName()) {
hash = (37 * hash) + EXCEPTIONCLASSNAME_FIELD_NUMBER;
hash = (53 * hash) + getExceptionClassName().hashCode();
}
if (hasErrorMsg()) {
hash = (37 * hash) + ERRORMSG_FIELD_NUMBER;
hash = (53 * hash) + getErrorMsg().hashCode();
}
if (hasErrorDetail()) {
hash = (37 * hash) + ERRORDETAIL_FIELD_NUMBER;
hash = (53 * hash) + errorDetail_;
}
if (hasClientId()) {
hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
hash = (53 * hash) + getClientId().hashCode();
}
if (hasRetryCount()) {
hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
hash = (53 * hash) + getRetryCount();
}
if (hasStateId()) {
hash = (37 * hash) + STATEID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getStateId());
}
if (hasRouterFederatedState()) {
hash = (37 * hash) + ROUTERFEDERATEDSTATE_FIELD_NUMBER;
hash = (53 * hash) + getRouterFederatedState().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
**
* Rpc Response Header
* +------------------------------------------------------------------+
* | Rpc total response length in bytes (4 bytes int) |
* | (sum of next two parts) |
* +------------------------------------------------------------------+
* | RpcResponseHeaderProto - serialized delimited (i.e. length-prefixed)  |
* +------------------------------------------------------------------+
* | if request is successful: |
* | - RpcResponse - The actual rpc response bytes follow |
* | the response header |
* | This response is serialized based on RpcKindProto |
* | if request fails : |
* | The rpc response header contains the necessary info |
* +------------------------------------------------------------------+
*
* Note that the rpc response header is also used when connection setup fails,
* i.e. the response looks like an rpc response with a fake callId.
*
*
* Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.RpcResponseHeaderProto)
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
callId_ = 0;
status_ = 0;
serverIpcVersionNum_ = 0;
exceptionClassName_ = "";
errorMsg_ = "";
errorDetail_ = 1;
clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
retryCount_ = -1;
stateId_ = 0L;
routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.callId_ = callId_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.status_ = status_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.serverIpcVersionNum_ = serverIpcVersionNum_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.exceptionClassName_ = exceptionClassName_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.errorMsg_ = errorMsg_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.errorDetail_ = errorDetail_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.clientId_ = clientId_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.retryCount_ = retryCount_;
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.stateId_ = stateId_;
to_bitField0_ |= 0x00000100;
}
if (((from_bitField0_ & 0x00000200) != 0)) {
result.routerFederatedState_ = routerFederatedState_;
to_bitField0_ |= 0x00000200;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance()) return this;
if (other.hasCallId()) {
setCallId(other.getCallId());
}
if (other.hasStatus()) {
setStatus(other.getStatus());
}
if (other.hasServerIpcVersionNum()) {
setServerIpcVersionNum(other.getServerIpcVersionNum());
}
if (other.hasExceptionClassName()) {
exceptionClassName_ = other.exceptionClassName_;
bitField0_ |= 0x00000008;
onChanged();
}
if (other.hasErrorMsg()) {
errorMsg_ = other.errorMsg_;
bitField0_ |= 0x00000010;
onChanged();
}
if (other.hasErrorDetail()) {
setErrorDetail(other.getErrorDetail());
}
if (other.hasClientId()) {
setClientId(other.getClientId());
}
if (other.hasRetryCount()) {
setRetryCount(other.getRetryCount());
}
if (other.hasStateId()) {
setStateId(other.getStateId());
}
if (other.hasRouterFederatedState()) {
setRouterFederatedState(other.getRouterFederatedState());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasCallId()) {
return false;
}
if (!hasStatus()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
callId_ = input.readUInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16: {
int tmpRaw = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto tmpValue =
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
status_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
case 24: {
serverIpcVersionNum_ = input.readUInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34: {
exceptionClassName_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42: {
errorMsg_ = input.readBytes();
bitField0_ |= 0x00000010;
break;
} // case 42
case 48: {
int tmpRaw = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto tmpValue =
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(6, tmpRaw);
} else {
errorDetail_ = tmpRaw;
bitField0_ |= 0x00000020;
}
break;
} // case 48
case 58: {
clientId_ = input.readBytes();
bitField0_ |= 0x00000040;
break;
} // case 58
case 64: {
retryCount_ = input.readSInt32();
bitField0_ |= 0x00000080;
break;
} // case 64
case 72: {
stateId_ = input.readInt64();
bitField0_ |= 0x00000100;
break;
} // case 72
case 82: {
routerFederatedState_ = input.readBytes();
bitField0_ |= 0x00000200;
break;
} // case 82
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
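// Parsing note: unrecognized status (case 16) and errorDetail (case 48) enum
// numbers are not dropped above; mergeUnknownVarintField(...) preserves them as
// unknown fields so they survive a parse/serialize round trip.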
private int bitField0_;
private int callId_ ;
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return Whether the callId field is set.
*/
@java.lang.Override
public boolean hasCallId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return The callId.
*/
@java.lang.Override
public int getCallId() {
return callId_;
}
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @param value The callId to set.
* @return This builder for chaining.
*/
public Builder setCallId(int value) {
callId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
* callId used in Request
*
*
* required uint32 callId = 1;
* @return This builder for chaining.
*/
public Builder clearCallId() {
bitField0_ = (bitField0_ & ~0x00000001);
callId_ = 0;
onChanged();
return this;
}
private int status_ = 0;
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return Whether the status field is set.
*/
@java.lang.Override public boolean hasStatus() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return The status.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.forNumber(status_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS : result;
}
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @param value The status to set.
* @return This builder for chaining.
*/
public Builder setStatus(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
status_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
* @return This builder for chaining.
*/
public Builder clearStatus() {
bitField0_ = (bitField0_ & ~0x00000002);
status_ = 0;
onChanged();
return this;
}
private int serverIpcVersionNum_ ;
/**
*
* Sent on both success and failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return Whether the serverIpcVersionNum field is set.
*/
@java.lang.Override
public boolean hasServerIpcVersionNum() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* Sent on both success and failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return The serverIpcVersionNum.
*/
@java.lang.Override
public int getServerIpcVersionNum() {
return serverIpcVersionNum_;
}
/**
*
* Sent on both success and failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @param value The serverIpcVersionNum to set.
* @return This builder for chaining.
*/
public Builder setServerIpcVersionNum(int value) {
serverIpcVersionNum_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
* Sent on both success and failure
*
*
* optional uint32 serverIpcVersionNum = 3;
* @return This builder for chaining.
*/
public Builder clearServerIpcVersionNum() {
bitField0_ = (bitField0_ & ~0x00000004);
serverIpcVersionNum_ = 0;
onChanged();
return this;
}
private java.lang.Object exceptionClassName_ = "";
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return Whether the exceptionClassName field is set.
*/
public boolean hasExceptionClassName() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return The exceptionClassName.
*/
public java.lang.String getExceptionClassName() {
java.lang.Object ref = exceptionClassName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
exceptionClassName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return The bytes for exceptionClassName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getExceptionClassNameBytes() {
java.lang.Object ref = exceptionClassName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
exceptionClassName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @param value The exceptionClassName to set.
* @return This builder for chaining.
*/
public Builder setExceptionClassName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
exceptionClassName_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @return This builder for chaining.
*/
public Builder clearExceptionClassName() {
exceptionClassName_ = getDefaultInstance().getExceptionClassName();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
* if request fails
*
*
* optional string exceptionClassName = 4;
* @param value The bytes for exceptionClassName to set.
* @return This builder for chaining.
*/
public Builder setExceptionClassNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
exceptionClassName_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
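// Note: unlike the String setter, setExceptionClassNameBytes stores the
// ByteString as-is with no UTF-8 validation at set time; callers supplying raw
// bytes are responsible for their encoding.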
private java.lang.Object errorMsg_ = "";
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return Whether the errorMsg field is set.
*/
public boolean hasErrorMsg() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return The errorMsg.
*/
public java.lang.String getErrorMsg() {
java.lang.Object ref = errorMsg_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
errorMsg_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return The bytes for errorMsg.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getErrorMsgBytes() {
java.lang.Object ref = errorMsg_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
errorMsg_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @param value The errorMsg to set.
* @return This builder for chaining.
*/
public Builder setErrorMsg(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
errorMsg_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @return This builder for chaining.
*/
public Builder clearErrorMsg() {
errorMsg_ = getDefaultInstance().getErrorMsg();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
* if request fails, often contains stack trace
*
*
* optional string errorMsg = 5;
* @param value The bytes for errorMsg to set.
* @return This builder for chaining.
*/
public Builder setErrorMsgBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
errorMsg_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
private int errorDetail_ = 1;
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return Whether the errorDetail field is set.
*/
@java.lang.Override public boolean hasErrorDetail() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return The errorDetail.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.forNumber(errorDetail_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION : result;
}
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @param value The errorDetail to set.
* @return This builder for chaining.
*/
public Builder setErrorDetail(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000020;
errorDetail_ = value.getNumber();
onChanged();
return this;
}
/**
*
* in case of error
*
*
* optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
* @return This builder for chaining.
*/
public Builder clearErrorDetail() {
bitField0_ = (bitField0_ & ~0x00000020);
errorDetail_ = 1;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString clientId_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return Whether the clientId field is set.
*/
@java.lang.Override
public boolean hasClientId() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return The clientId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getClientId() {
return clientId_;
}
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @param value The clientId to set.
* @return This builder for chaining.
*/
public Builder setClientId(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
clientId_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
* Globally unique client ID
*
*
* optional bytes clientId = 7;
* @return This builder for chaining.
*/
public Builder clearClientId() {
bitField0_ = (bitField0_ & ~0x00000040);
clientId_ = getDefaultInstance().getClientId();
onChanged();
return this;
}
private int retryCount_ = -1;
/**
* optional sint32 retryCount = 8 [default = -1];
* @return Whether the retryCount field is set.
*/
@java.lang.Override
public boolean hasRetryCount() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional sint32 retryCount = 8 [default = -1];
* @return The retryCount.
*/
@java.lang.Override
public int getRetryCount() {
return retryCount_;
}
/**
* optional sint32 retryCount = 8 [default = -1];
* @param value The retryCount to set.
* @return This builder for chaining.
*/
public Builder setRetryCount(int value) {
retryCount_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* optional sint32 retryCount = 8 [default = -1];
* @return This builder for chaining.
*/
public Builder clearRetryCount() {
bitField0_ = (bitField0_ & ~0x00000080);
retryCount_ = -1;
onChanged();
return this;
}
private long stateId_ ;
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return Whether the stateId field is set.
*/
@java.lang.Override
public boolean hasStateId() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return The stateId.
*/
@java.lang.Override
public long getStateId() {
return stateId_;
}
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @param value The stateId to set.
* @return This builder for chaining.
*/
public Builder setStateId(long value) {
stateId_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
/**
*
* The last written Global State ID
*
*
* optional int64 stateId = 9;
* @return This builder for chaining.
*/
public Builder clearStateId() {
bitField0_ = (bitField0_ & ~0x00000100);
stateId_ = 0L;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString routerFederatedState_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return Whether the routerFederatedState field is set.
*/
@java.lang.Override
public boolean hasRouterFederatedState() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return The routerFederatedState.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getRouterFederatedState() {
return routerFederatedState_;
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @param value The routerFederatedState to set.
* @return This builder for chaining.
*/
public Builder setRouterFederatedState(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
routerFederatedState_ = value;
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
*
* Alignment context info for use with routers.
* The client should not interpret these bytes, but only
* forward them to the router using RpcRequestHeaderProto.routerFederatedState.
*
*
* optional bytes routerFederatedState = 10;
* @return This builder for chaining.
*/
public Builder clearRouterFederatedState() {
bitField0_ = (bitField0_ & ~0x00000200);
routerFederatedState_ = getDefaultInstance().getRouterFederatedState();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcResponseHeaderProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcResponseHeaderProto)
private static final org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto();
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RpcResponseHeaderProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RpcResponseHeaderProto>() {
@java.lang.Override
public RpcResponseHeaderProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RpcResponseHeaderProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RpcResponseHeaderProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
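// Round-trip sketch (illustrative only; `out` and `in` are hypothetical
// streams): the header itself is written length-delimited, matching the layout
// described in the RpcResponseHeaderProto javadoc, e.g.:
//
//   RpcHeaderProtos.RpcResponseHeaderProto header =
//       RpcHeaderProtos.RpcResponseHeaderProto.newBuilder()
//           .setCallId(7)  // required
//           .setStatus(RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS)  // required
//           .build();
//   header.writeDelimitedTo(out);  // varint length prefix + serialized bytes
//   RpcHeaderProtos.RpcResponseHeaderProto echoed =
//       RpcHeaderProtos.RpcResponseHeaderProto.parseDelimitedFrom(in);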
public interface RpcSaslProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.RpcSaslProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional uint32 version = 1;
* @return Whether the version field is set.
*/
boolean hasVersion();
/**
* optional uint32 version = 1;
* @return The version.
*/
int getVersion();
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return Whether the state field is set.
*/
boolean hasState();
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return The state.
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState();
/**
* optional bytes token = 3;
* @return Whether the token field is set.
*/
boolean hasToken();
/**
* optional bytes token = 3;
* @return The token.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getToken();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>
getAuthsList();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index);
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
int getAuthsCount();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsOrBuilderList();
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto}
*/
public static final class RpcSaslProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.RpcSaslProto)
RpcSaslProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RpcSaslProto.newBuilder() to construct.
private RpcSaslProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RpcSaslProto() {
state_ = 0;
token_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
auths_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RpcSaslProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
}
/**
* Protobuf enum {@code hadoop.common.RpcSaslProto.SaslState}
*/
public enum SaslState
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
* SUCCESS = 0;
*/
SUCCESS(0),
/**
* NEGOTIATE = 1;
*/
NEGOTIATE(1),
/**
* INITIATE = 2;
*/
INITIATE(2),
/**
* CHALLENGE = 3;
*/
CHALLENGE(3),
/**
* RESPONSE = 4;
*/
RESPONSE(4),
/**
* WRAP = 5;
*/
WRAP(5),
;
/**
* SUCCESS = 0;
*/
public static final int SUCCESS_VALUE = 0;
/**
* NEGOTIATE = 1;
*/
public static final int NEGOTIATE_VALUE = 1;
/**
* INITIATE = 2;
*/
public static final int INITIATE_VALUE = 2;
/**
* CHALLENGE = 3;
*/
public static final int CHALLENGE_VALUE = 3;
/**
* RESPONSE = 4;
*/
public static final int RESPONSE_VALUE = 4;
/**
* WRAP = 5;
*/
public static final int WRAP_VALUE = 5;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static SaslState valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static SaslState forNumber(int value) {
switch (value) {
case 0: return SUCCESS;
case 1: return NEGOTIATE;
case 2: return INITIATE;
case 3: return CHALLENGE;
case 4: return RESPONSE;
case 5: return WRAP;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SaslState>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
SaslState> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SaslState>() {
public SaslState findValueByNumber(int number) {
return SaslState.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDescriptor().getEnumTypes().get(0);
}
private static final SaslState[] VALUES = values();
public static SaslState valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private SaslState(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.common.RpcSaslProto.SaslState)
}
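// Handshake sketch (informal, inferred from the state names rather than from
// this generated file): the server typically opens with NEGOTIATE carrying the
// supported auths, the client replies INITIATE with its chosen mechanism and
// first token, the peers alternate CHALLENGE/RESPONSE until the server sends
// SUCCESS, and WRAP frames SASL-wrapped payloads once negotiation completes.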
public interface SaslAuthOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.RpcSaslProto.SaslAuth)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string method = 1;
* @return Whether the method field is set.
*/
boolean hasMethod();
/**
* required string method = 1;
* @return The method.
*/
java.lang.String getMethod();
/**
* required string method = 1;
* @return The bytes for method.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getMethodBytes();
/**
* required string mechanism = 2;
* @return Whether the mechanism field is set.
*/
boolean hasMechanism();
/**
* required string mechanism = 2;
* @return The mechanism.
*/
java.lang.String getMechanism();
/**
* required string mechanism = 2;
* @return The bytes for mechanism.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getMechanismBytes();
/**
* optional string protocol = 3;
* @return Whether the protocol field is set.
*/
boolean hasProtocol();
/**
* optional string protocol = 3;
* @return The protocol.
*/
java.lang.String getProtocol();
/**
* optional string protocol = 3;
* @return The bytes for protocol.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getProtocolBytes();
/**
* optional string serverId = 4;
* @return Whether the serverId field is set.
*/
boolean hasServerId();
/**
* optional string serverId = 4;
* @return The serverId.
*/
java.lang.String getServerId();
/**
* optional string serverId = 4;
* @return The bytes for serverId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getServerIdBytes();
/**
* optional bytes challenge = 5;
* @return Whether the challenge field is set.
*/
boolean hasChallenge();
/**
* optional bytes challenge = 5;
* @return The challenge.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getChallenge();
}
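// Reader sketch (illustrative): with proto2 semantics, the hasX() accessors
// above report field presence; getX() on an unset field returns its default
// ("" for the strings, ByteString.EMPTY for challenge). 'auth' below is a
// hypothetical SaslAuth instance.
//
//   if (auth.hasServerId()) {
//     java.lang.String id = auth.getServerId();
//   }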
/**
* Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
*/
public static final class SaslAuth extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.RpcSaslProto.SaslAuth)
SaslAuthOrBuilder {
private static final long serialVersionUID = 0L;
// Use SaslAuth.newBuilder() to construct.
private SaslAuth(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SaslAuth() {
method_ = "";
mechanism_ = "";
protocol_ = "";
serverId_ = "";
challenge_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SaslAuth();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
}
private int bitField0_;
public static final int METHOD_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object method_ = "";
/**
* required string method = 1;
* @return Whether the method field is set.
*/
@java.lang.Override
public boolean hasMethod() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string method = 1;
* @return The method.
*/
@java.lang.Override
public java.lang.String getMethod() {
java.lang.Object ref = method_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
method_ = s;
}
return s;
}
}
/**
* required string method = 1;
* @return The bytes for method.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMethodBytes() {
java.lang.Object ref = method_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
method_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
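// Note on the accessor pair above: method_ holds either a String or a
// ByteString. getMethod() decodes a ByteString lazily and caches the decoded
// String only when the bytes are valid UTF-8; getMethodBytes() converts in
// the other direction and caches the ByteString. The same lazy-caching
// pattern repeats for mechanism, protocol, and serverId below.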
public static final int MECHANISM_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object mechanism_ = "";
/**
* required string mechanism = 2;
* @return Whether the mechanism field is set.
*/
@java.lang.Override
public boolean hasMechanism() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string mechanism = 2;
* @return The mechanism.
*/
@java.lang.Override
public java.lang.String getMechanism() {
java.lang.Object ref = mechanism_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
mechanism_ = s;
}
return s;
}
}
/**
* required string mechanism = 2;
* @return The bytes for mechanism.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMechanismBytes() {
java.lang.Object ref = mechanism_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mechanism_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int PROTOCOL_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object protocol_ = "";
/**
* optional string protocol = 3;
* @return Whether the protocol field is set.
*/
@java.lang.Override
public boolean hasProtocol() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string protocol = 3;
* @return The protocol.
*/
@java.lang.Override
public java.lang.String getProtocol() {
java.lang.Object ref = protocol_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
protocol_ = s;
}
return s;
}
}
/**
* optional string protocol = 3;
* @return The bytes for protocol.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getProtocolBytes() {
java.lang.Object ref = protocol_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
protocol_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int SERVERID_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object serverId_ = "";
/**
* optional string serverId = 4;
* @return Whether the serverId field is set.
*/
@java.lang.Override
public boolean hasServerId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string serverId = 4;
* @return The serverId.
*/
@java.lang.Override
public java.lang.String getServerId() {
java.lang.Object ref = serverId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
serverId_ = s;
}
return s;
}
}
/**
* optional string serverId = 4;
* @return The bytes for serverId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getServerIdBytes() {
java.lang.Object ref = serverId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int CHALLENGE_FIELD_NUMBER = 5;
private org.apache.hadoop.thirdparty.protobuf.ByteString challenge_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes challenge = 5;
* @return Whether the challenge field is set.
*/
@java.lang.Override
public boolean hasChallenge() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional bytes challenge = 5;
* @return The challenge.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getChallenge() {
return challenge_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasMethod()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMechanism()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
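// isInitialized() enforces the proto2 'required' constraints of SaslAuth:
// method (field 1) and mechanism (field 2) must be present; the optional
// fields are not checked. The answer is memoized in memoizedIsInitialized.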
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, method_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, mechanism_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, protocol_);
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, serverId_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeBytes(5, challenge_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, method_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, mechanism_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, protocol_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, serverId_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(5, challenge_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) obj;
if (hasMethod() != other.hasMethod()) return false;
if (hasMethod()) {
if (!getMethod()
.equals(other.getMethod())) return false;
}
if (hasMechanism() != other.hasMechanism()) return false;
if (hasMechanism()) {
if (!getMechanism()
.equals(other.getMechanism())) return false;
}
if (hasProtocol() != other.hasProtocol()) return false;
if (hasProtocol()) {
if (!getProtocol()
.equals(other.getProtocol())) return false;
}
if (hasServerId() != other.hasServerId()) return false;
if (hasServerId()) {
if (!getServerId()
.equals(other.getServerId())) return false;
}
if (hasChallenge() != other.hasChallenge()) return false;
if (hasChallenge()) {
if (!getChallenge()
.equals(other.getChallenge())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMethod()) {
hash = (37 * hash) + METHOD_FIELD_NUMBER;
hash = (53 * hash) + getMethod().hashCode();
}
if (hasMechanism()) {
hash = (37 * hash) + MECHANISM_FIELD_NUMBER;
hash = (53 * hash) + getMechanism().hashCode();
}
if (hasProtocol()) {
hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
hash = (53 * hash) + getProtocol().hashCode();
}
if (hasServerId()) {
hash = (37 * hash) + SERVERID_FIELD_NUMBER;
hash = (53 * hash) + getServerId().hashCode();
}
if (hasChallenge()) {
hash = (37 * hash) + CHALLENGE_FIELD_NUMBER;
hash = (53 * hash) + getChallenge().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
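// Round-trip sketch (illustrative; the field values are hypothetical):
//
//   byte[] bytes = RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
//       .setMethod("TOKEN")
//       .setMechanism("DIGEST-MD5")
//       .build()        // build() throws if a required field were unset
//       .toByteArray();
//   RpcHeaderProtos.RpcSaslProto.SaslAuth parsed =
//       RpcHeaderProtos.RpcSaslProto.SaslAuth.parseFrom(bytes);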
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.RpcSaslProto.SaslAuth)
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
method_ = "";
mechanism_ = "";
protocol_ = "";
serverId_ = "";
challenge_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.method_ = method_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.mechanism_ = mechanism_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.protocol_ = protocol_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.serverId_ = serverId_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.challenge_ = challenge_;
to_bitField0_ |= 0x00000010;
}
result.bitField0_ |= to_bitField0_;
}
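// buildPartial0 copies each builder field whose presence bit is set; the bit
// assignments match the accessors above: 0x01 method, 0x02 mechanism,
// 0x04 protocol, 0x08 serverId, 0x10 challenge.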
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance()) return this;
if (other.hasMethod()) {
method_ = other.method_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasMechanism()) {
mechanism_ = other.mechanism_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasProtocol()) {
protocol_ = other.protocol_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasServerId()) {
serverId_ = other.serverId_;
bitField0_ |= 0x00000008;
onChanged();
}
if (other.hasChallenge()) {
setChallenge(other.getChallenge());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasMethod()) {
return false;
}
if (!hasMechanism()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
method_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
mechanism_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
protocol_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
serverId_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42: {
challenge_ = input.readBytes();
bitField0_ |= 0x00000010;
break;
} // case 42
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object method_ = "";
/**
* required string method = 1;
* @return Whether the method field is set.
*/
public boolean hasMethod() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string method = 1;
* @return The method.
*/
public java.lang.String getMethod() {
java.lang.Object ref = method_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
method_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string method = 1;
* @return The bytes for method.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMethodBytes() {
java.lang.Object ref = method_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
method_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string method = 1;
* @param value The method to set.
* @return This builder for chaining.
*/
public Builder setMethod(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
method_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string method = 1;
* @return This builder for chaining.
*/
public Builder clearMethod() {
method_ = getDefaultInstance().getMethod();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string method = 1;
* @param value The bytes for method to set.
* @return This builder for chaining.
*/
public Builder setMethodBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
method_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object mechanism_ = "";
/**
* required string mechanism = 2;
* @return Whether the mechanism field is set.
*/
public boolean hasMechanism() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string mechanism = 2;
* @return The mechanism.
*/
public java.lang.String getMechanism() {
java.lang.Object ref = mechanism_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
mechanism_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string mechanism = 2;
* @return The bytes for mechanism.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMechanismBytes() {
java.lang.Object ref = mechanism_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mechanism_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string mechanism = 2;
* @param value The mechanism to set.
* @return This builder for chaining.
*/
public Builder setMechanism(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
mechanism_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required string mechanism = 2;
* @return This builder for chaining.
*/
public Builder clearMechanism() {
mechanism_ = getDefaultInstance().getMechanism();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* required string mechanism = 2;
* @param value The bytes for mechanism to set.
* @return This builder for chaining.
*/
public Builder setMechanismBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
mechanism_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object protocol_ = "";
/**
* optional string protocol = 3;
* @return Whether the protocol field is set.
*/
public boolean hasProtocol() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string protocol = 3;
* @return The protocol.
*/
public java.lang.String getProtocol() {
java.lang.Object ref = protocol_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
protocol_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string protocol = 3;
* @return The bytes for protocol.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getProtocolBytes() {
java.lang.Object ref = protocol_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
protocol_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string protocol = 3;
* @param value The protocol to set.
* @return This builder for chaining.
*/
public Builder setProtocol(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
protocol_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string protocol = 3;
* @return This builder for chaining.
*/
public Builder clearProtocol() {
protocol_ = getDefaultInstance().getProtocol();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string protocol = 3;
* @param value The bytes for protocol to set.
* @return This builder for chaining.
*/
public Builder setProtocolBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
protocol_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object serverId_ = "";
/**
* optional string serverId = 4;
* @return Whether the serverId field is set.
*/
public boolean hasServerId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string serverId = 4;
* @return The serverId.
*/
public java.lang.String getServerId() {
java.lang.Object ref = serverId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
serverId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string serverId = 4;
* @return The bytes for serverId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getServerIdBytes() {
java.lang.Object ref = serverId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string serverId = 4;
* @param value The serverId to set.
* @return This builder for chaining.
*/
public Builder setServerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
serverId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional string serverId = 4;
* @return This builder for chaining.
*/
public Builder clearServerId() {
serverId_ = getDefaultInstance().getServerId();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
* optional string serverId = 4;
* @param value The bytes for serverId to set.
* @return This builder for chaining.
*/
public Builder setServerIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
serverId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString challenge_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes challenge = 5;
* @return Whether the challenge field is set.
*/
@java.lang.Override
public boolean hasChallenge() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional bytes challenge = 5;
* @return The challenge.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getChallenge() {
return challenge_;
}
/**
* optional bytes challenge = 5;
* @param value The challenge to set.
* @return This builder for chaining.
*/
public Builder setChallenge(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
challenge_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* optional bytes challenge = 5;
* @return This builder for chaining.
*/
public Builder clearChallenge() {
bitField0_ = (bitField0_ & ~0x00000010);
challenge_ = getDefaultInstance().getChallenge();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto.SaslAuth)
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto.SaslAuth)
private static final org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth();
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SaslAuth>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SaslAuth>() {
@java.lang.Override
public SaslAuth parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SaslAuth> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SaslAuth> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
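// Usage sketch (illustrative; the method and mechanism values are
// hypothetical): a NEGOTIATE message advertising a single auth option, built
// with the nested SaslAuth builder defined above.
//
//   RpcHeaderProtos.RpcSaslProto negotiate =
//       RpcHeaderProtos.RpcSaslProto.newBuilder()
//           .setState(RpcHeaderProtos.RpcSaslProto.SaslState.NEGOTIATE)
//           .addAuths(RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
//               .setMethod("TOKEN")
//               .setMechanism("DIGEST-MD5"))
//           .build();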
private int bitField0_;
public static final int VERSION_FIELD_NUMBER = 1;
private int version_ = 0;
/**
* optional uint32 version = 1;
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional uint32 version = 1;
* @return The version.
*/
@java.lang.Override
public int getVersion() {
return version_;
}
public static final int STATE_FIELD_NUMBER = 2;
private int state_ = 0;
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return The state.
*/
@java.lang.Override public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.forNumber(state_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS : result;
}
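// If state_ holds a number with no matching SaslState constant (for example
// one written by a newer peer), getState() falls back to SUCCESS, the first
// declared value; hasState() still reports whether the field was set.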
public static final int TOKEN_FIELD_NUMBER = 3;
private org.apache.hadoop.thirdparty.protobuf.ByteString token_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes token = 3;
* @return Whether the token field is set.
*/
@java.lang.Override
public boolean hasToken() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bytes token = 3;
* @return The token.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getToken() {
return token_;
}
public static final int AUTHS_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_;
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
return auths_;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsOrBuilderList() {
return auths_;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
@java.lang.Override
public int getAuthsCount() {
return auths_.size();
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
return auths_.get(index);
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
int index) {
return auths_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasState()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getAuthsCount(); i++) {
if (!getAuths(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
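// Initialization is recursive here: the required state field must be set,
// and every SaslAuth in auths must itself be initialized, i.e. carry its
// required method and mechanism fields.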
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeUInt32(1, version_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, state_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeBytes(3, token_);
}
for (int i = 0; i < auths_.size(); i++) {
output.writeMessage(4, auths_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeUInt32Size(1, version_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, state_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(3, token_);
}
for (int i = 0; i < auths_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, auths_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) obj;
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (getVersion()
!= other.getVersion()) return false;
}
if (hasState() != other.hasState()) return false;
if (hasState()) {
if (state_ != other.state_) return false;
}
if (hasToken() != other.hasToken()) return false;
if (hasToken()) {
if (!getToken()
.equals(other.getToken())) return false;
}
if (!getAuthsList()
.equals(other.getAuthsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion();
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + state_;
}
if (hasToken()) {
hash = (37 * hash) + TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getToken().hashCode();
}
if (getAuthsCount() > 0) {
hash = (37 * hash) + AUTHS_FIELD_NUMBER;
hash = (53 * hash) + getAuthsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.RpcSaslProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.RpcSaslProto)
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
}
// Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
version_ = 0;
state_ = 0;
token_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
if (authsBuilder_ == null) {
auths_ = java.util.Collections.emptyList();
} else {
auths_ = null;
authsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstanceForType() {
return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto build() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto buildPartial() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result) {
if (authsBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0)) {
auths_ = java.util.Collections.unmodifiableList(auths_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.auths_ = auths_;
} else {
result.auths_ = authsBuilder_.build();
}
}
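// When no RepeatedFieldBuilderV3 exists, the builder hands its own auths_
// list to the message, first wrapping it in Collections.unmodifiableList so
// the built message stays immutable; otherwise the field builder assembles
// the list.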
private void buildPartial0(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.version_ = version_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.state_ = state_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.token_ = token_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) {
return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other) {
if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance()) return this;
if (other.hasVersion()) {
setVersion(other.getVersion());
}
if (other.hasState()) {
setState(other.getState());
}
if (other.hasToken()) {
setToken(other.getToken());
}
if (authsBuilder_ == null) {
if (!other.auths_.isEmpty()) {
if (auths_.isEmpty()) {
auths_ = other.auths_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureAuthsIsMutable();
auths_.addAll(other.auths_);
}
onChanged();
}
} else {
if (!other.auths_.isEmpty()) {
if (authsBuilder_.isEmpty()) {
authsBuilder_.dispose();
authsBuilder_ = null;
auths_ = other.auths_;
bitField0_ = (bitField0_ & ~0x00000008);
authsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAuthsFieldBuilder() : null;
} else {
authsBuilder_.addAllMessages(other.auths_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasState()) {
return false;
}
for (int i = 0; i < getAuthsCount(); i++) {
if (!getAuths(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
version_ = input.readUInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16: {
int tmpRaw = input.readEnum();
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState tmpValue =
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
state_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
case 26: {
token_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth m =
input.readMessage(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.PARSER,
extensionRegistry);
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.add(m);
} else {
authsBuilder_.addMessage(m);
}
break;
} // case 34
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
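// The case labels above are protobuf tags: (field_number << 3) | wire_type.
// For example, case 34 is field 4 (auths) with wire type 2
// (length-delimited): 4 << 3 | 2 == 34. An unrecognized SaslState number at
// case 16 is preserved as an unknown varint field rather than dropped.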
private int bitField0_;
private int version_ ;
/**
* optional uint32 version = 1;
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional uint32 version = 1;
* @return The version.
*/
@java.lang.Override
public int getVersion() {
return version_;
}
/**
* optional uint32 version = 1;
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(int value) {
version_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional uint32 version = 1;
* @return This builder for chaining.
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = 0;
onChanged();
return this;
}
private int state_ = 0;
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return The state.
*/
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState result = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.forNumber(state_);
return result == null ? org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS : result;
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @param value The state to set.
* @return This builder for chaining.
*/
public Builder setState(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
state_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hadoop.common.RpcSaslProto.SaslState state = 2;
* @return This builder for chaining.
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000002);
state_ = 0;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString token_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes token = 3;
* @return Whether the token field is set.
*/
@java.lang.Override
public boolean hasToken() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bytes token = 3;
* @return The token.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getToken() {
return token_;
}
/**
* optional bytes token = 3;
* @param value The token to set.
* @return This builder for chaining.
*/
public Builder setToken(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
token_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional bytes token = 3;
* @return This builder for chaining.
*/
public Builder clearToken() {
bitField0_ = (bitField0_ & ~0x00000004);
token_ = getDefaultInstance().getToken();
onChanged();
return this;
}
private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_ =
java.util.Collections.emptyList();
private void ensureAuthsIsMutable() {
if (!((bitField0_ & 0x00000008) != 0)) {
auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>(auths_);
bitField0_ |= 0x00000008;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> authsBuilder_;
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
if (authsBuilder_ == null) {
return java.util.Collections.unmodifiableList(auths_);
} else {
return authsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public int getAuthsCount() {
if (authsBuilder_ == null) {
return auths_.size();
} else {
return authsBuilder_.getCount();
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
if (authsBuilder_ == null) {
return auths_.get(index);
} else {
return authsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder setAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
if (authsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAuthsIsMutable();
auths_.set(index, value);
onChanged();
} else {
authsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder setAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.set(index, builderForValue.build());
onChanged();
} else {
authsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
if (authsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAuthsIsMutable();
auths_.add(value);
onChanged();
} else {
authsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
if (authsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAuthsIsMutable();
auths_.add(index, value);
onChanged();
} else {
authsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.add(builderForValue.build());
onChanged();
} else {
authsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAuths(
int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.add(index, builderForValue.build());
onChanged();
} else {
authsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder addAllAuths(
java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> values) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, auths_);
onChanged();
} else {
authsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder clearAuths() {
if (authsBuilder_ == null) {
auths_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
authsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public Builder removeAuths(int index) {
if (authsBuilder_ == null) {
ensureAuthsIsMutable();
auths_.remove(index);
onChanged();
} else {
authsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder getAuthsBuilder(
int index) {
return getAuthsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
int index) {
if (authsBuilder_ == null) {
return auths_.get(index); } else {
return authsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsOrBuilderList() {
if (authsBuilder_ != null) {
return authsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(auths_);
}
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder() {
return getAuthsFieldBuilder().addBuilder(
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder(
int index) {
return getAuthsFieldBuilder().addBuilder(
index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
}
/**
* repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
*/
public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder>
getAuthsBuilderList() {
return getAuthsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>
getAuthsFieldBuilder() {
if (authsBuilder_ == null) {
authsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>(
auths_,
((bitField0_ & 0x00000008) != 0),
getParentForChildren(),
isClean());
auths_ = null;
}
return authsBuilder_;
}
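// Usage sketch (illustrative comment, not part of the generated file): the
// auths accessors above implement the standard protobuf repeated-message
// pattern — elements live in the auths_ list until a nested builder is first
// requested, after which authsBuilder_ owns them; both styles below end up in
// the same repeated field. The method/mechanism strings are illustrative values:
//
//   RpcHeaderProtos.RpcSaslProto.Builder b = RpcHeaderProtos.RpcSaslProto.newBuilder()
//       .setState(RpcHeaderProtos.RpcSaslProto.SaslState.NEGOTIATE);
//   b.addAuths(RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
//       .setMethod("TOKEN")          // method and mechanism are required fields
//       .setMechanism("DIGEST-MD5")
//       .build());
//   b.addAuthsBuilder()              // nested-builder variant; triggers getAuthsFieldBuilder()
//       .setMethod("KERBEROS")
//       .setMechanism("GSSAPI");
//   RpcHeaderProtos.RpcSaslProto negotiate = b.build();  // carries two auths entries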
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto)
private static final org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto();
}
public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RpcSaslProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RpcSaslProto>() {
@java.lang.Override
public RpcSaslProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RpcSaslProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RpcSaslProto> getParserForType() {
return PARSER;
}
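// Usage sketch (illustrative comment, not part of the generated file):
// round-tripping a message through the parser machinery above. The static
// parseFrom helpers delegate to this parser and then verify required fields
// (state here, plus method/mechanism on each SaslAuth):
//
//   byte[] wire = negotiate.toByteArray();
//   RpcHeaderProtos.RpcSaslProto decoded =
//       RpcHeaderProtos.RpcSaslProto.parseFrom(wire);
//   // equivalently, via the non-deprecated accessor:
//   RpcHeaderProtos.RpcSaslProto decoded2 =
//       RpcHeaderProtos.RpcSaslProto.parser().parseFrom(wire);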
@java.lang.Override
public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RPCCallerContextProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcSaslProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable;
public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\017RpcHeader.proto\022\rhadoop.common\"K\n\021RPCT" +
"raceInfoProto\022\017\n\007traceId\030\001 \001(\003\022\020\n\010parent" +
"Id\030\002 \001(\003\022\023\n\013spanContext\030\003 \001(\014\";\n\025RPCCall" +
"erContextProto\022\017\n\007context\030\001 \002(\t\022\021\n\tsigna" +
"ture\030\002 \001(\014\"\303\003\n\025RpcRequestHeaderProto\022,\n\007" +
"rpcKind\030\001 \001(\0162\033.hadoop.common.RpcKindPro" +
"to\022B\n\005rpcOp\030\002 \001(\01623.hadoop.common.RpcReq" +
"uestHeaderProto.OperationProto\022\016\n\006callId" +
"\030\003 \002(\021\022\020\n\010clientId\030\004 \002(\014\022\026\n\nretryCount\030\005" +
" \001(\021:\002-1\0223\n\ttraceInfo\030\006 \001(\0132 .hadoop.com" +
"mon.RPCTraceInfoProto\022;\n\rcallerContext\030\007" +
" \001(\0132$.hadoop.common.RPCCallerContextPro" +
"to\022\017\n\007stateId\030\010 \001(\003\022\034\n\024routerFederatedSt" +
"ate\030\t \001(\014\"]\n\016OperationProto\022\024\n\020RPC_FINAL" +
"_PACKET\020\000\022\033\n\027RPC_CONTINUATION_PACKET\020\001\022\030" +
"\n\024RPC_CLOSE_CONNECTION\020\002\"\371\005\n\026RpcResponse" +
"HeaderProto\022\016\n\006callId\030\001 \002(\r\022D\n\006status\030\002 " +
"\002(\01624.hadoop.common.RpcResponseHeaderPro" +
"to.RpcStatusProto\022\033\n\023serverIpcVersionNum" +
"\030\003 \001(\r\022\032\n\022exceptionClassName\030\004 \001(\t\022\020\n\010er" +
"rorMsg\030\005 \001(\t\022L\n\013errorDetail\030\006 \001(\01627.hado" +
"op.common.RpcResponseHeaderProto.RpcErro" +
"rCodeProto\022\020\n\010clientId\030\007 \001(\014\022\026\n\nretryCou" +
"nt\030\010 \001(\021:\002-1\022\017\n\007stateId\030\t \001(\003\022\034\n\024routerF" +
"ederatedState\030\n \001(\014\"3\n\016RpcStatusProto\022\013\n" +
"\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"\341\002\n\021Rpc" +
"ErrorCodeProto\022\025\n\021ERROR_APPLICATION\020\001\022\030\n" +
"\024ERROR_NO_SUCH_METHOD\020\002\022\032\n\026ERROR_NO_SUCH" +
"_PROTOCOL\020\003\022\024\n\020ERROR_RPC_SERVER\020\004\022\036\n\032ERR" +
"OR_SERIALIZING_RESPONSE\020\005\022\036\n\032ERROR_RPC_V" +
"ERSION_MISMATCH\020\006\022\021\n\rFATAL_UNKNOWN\020\n\022#\n\037" +
"FATAL_UNSUPPORTED_SERIALIZATION\020\013\022\034\n\030FAT" +
"AL_INVALID_RPC_HEADER\020\014\022\037\n\033FATAL_DESERIA" +
"LIZING_REQUEST\020\r\022\032\n\026FATAL_VERSION_MISMAT" +
"CH\020\016\022\026\n\022FATAL_UNAUTHORIZED\020\017\"\335\002\n\014RpcSasl" +
"Proto\022\017\n\007version\030\001 \001(\r\0224\n\005state\030\002 \002(\0162%." +
"hadoop.common.RpcSaslProto.SaslState\022\r\n\005" +
"token\030\003 \001(\014\0223\n\005auths\030\004 \003(\0132$.hadoop.comm" +
"on.RpcSaslProto.SaslAuth\032d\n\010SaslAuth\022\016\n\006" +
"method\030\001 \002(\t\022\021\n\tmechanism\030\002 \002(\t\022\020\n\010proto" +
"col\030\003 \001(\t\022\020\n\010serverId\030\004 \001(\t\022\021\n\tchallenge" +
"\030\005 \001(\014\"\\\n\tSaslState\022\013\n\007SUCCESS\020\000\022\r\n\tNEGO" +
"TIATE\020\001\022\014\n\010INITIATE\020\002\022\r\n\tCHALLENGE\020\003\022\014\n\010" +
"RESPONSE\020\004\022\010\n\004WRAP\020\005*J\n\014RpcKindProto\022\017\n\013" +
"RPC_BUILTIN\020\000\022\020\n\014RPC_WRITABLE\020\001\022\027\n\023RPC_P" +
"ROTOCOL_BUFFER\020\002B4\n\036org.apache.hadoop.ip" +
"c.protobufB\017RpcHeaderProtos\240\001\001"
};
descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
});
internal_static_hadoop_common_RPCTraceInfoProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_RPCTraceInfoProto_descriptor,
new java.lang.String[] { "TraceId", "ParentId", "SpanContext", });
internal_static_hadoop_common_RPCCallerContextProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_RPCCallerContextProto_descriptor,
new java.lang.String[] { "Context", "Signature", });
internal_static_hadoop_common_RpcRequestHeaderProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_RpcRequestHeaderProto_descriptor,
new java.lang.String[] { "RpcKind", "RpcOp", "CallId", "ClientId", "RetryCount", "TraceInfo", "CallerContext", "StateId", "RouterFederatedState", });
internal_static_hadoop_common_RpcResponseHeaderProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_RpcResponseHeaderProto_descriptor,
new java.lang.String[] { "CallId", "Status", "ServerIpcVersionNum", "ExceptionClassName", "ErrorMsg", "ErrorDetail", "ClientId", "RetryCount", "StateId", "RouterFederatedState", });
internal_static_hadoop_common_RpcSaslProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_RpcSaslProto_descriptor,
new java.lang.String[] { "Version", "State", "Token", "Auths", });
internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor =
internal_static_hadoop_common_RpcSaslProto_descriptor.getNestedTypes().get(0);
internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor,
new java.lang.String[] { "Method", "Mechanism", "Protocol", "ServerId", "Challenge", });
}
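// Usage sketch (illustrative comment, not part of the generated file): the
// embedded descriptor built above can be inspected at runtime, e.g. to list
// every message and field this .proto file defines:
//
//   for (org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor d :
//       RpcHeaderProtos.getDescriptor().getMessageTypes()) {
//     System.out.println(d.getFullName());  // e.g. "hadoop.common.RpcSaslProto"
//     for (org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor f : d.getFields()) {
//       System.out.println("  " + f.getNumber() + " " + f.getName());
//     }
//   }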
// @@protoc_insertion_point(outer_class_scope)
}