Please wait. This can take a few minutes ...
Downloading a project consumes significant server resources. Please understand that we have to cover our server costs. Thank you in advance.
Project price: only $1
You can buy this project and download or modify it as often as you want.
org.apache.hudi.org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos Maven / Gradle / Ivy
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: LockService.proto
package org.apache.hadoop.hbase.shaded.protobuf.generated;
@javax.annotation.Generated("proto") public final class LockServiceProtos {
private LockServiceProtos() {}
public static void registerAllExtensions(
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite) registry);
}
/**
* Protobuf enum {@code hbase.pb.LockType}
*/
public enum LockType
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
* EXCLUSIVE = 1;
*/
EXCLUSIVE(1),
/**
* SHARED = 2;
*/
SHARED(2),
;
/**
* EXCLUSIVE = 1;
*/
public static final int EXCLUSIVE_VALUE = 1;
/**
* SHARED = 2;
*/
public static final int SHARED_VALUE = 2;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static LockType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static LockType forNumber(int value) {
switch (value) {
case 1: return EXCLUSIVE;
case 2: return SHARED;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
LockType> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap() {
public LockType findValueByNumber(int number) {
return LockType.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getEnumTypes().get(0);
}
private static final LockType[] VALUES = values();
public static LockType valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private LockType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.LockType)
}
/**
* Protobuf enum {@code hbase.pb.LockedResourceType}
*/
public enum LockedResourceType
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
* SERVER = 1;
*/
SERVER(1),
/**
* NAMESPACE = 2;
*/
NAMESPACE(2),
/**
* TABLE = 3;
*/
TABLE(3),
/**
* REGION = 4;
*/
REGION(4),
/**
* PEER = 5;
*/
PEER(5),
;
/**
* SERVER = 1;
*/
public static final int SERVER_VALUE = 1;
/**
* NAMESPACE = 2;
*/
public static final int NAMESPACE_VALUE = 2;
/**
* TABLE = 3;
*/
public static final int TABLE_VALUE = 3;
/**
* REGION = 4;
*/
public static final int REGION_VALUE = 4;
/**
* PEER = 5;
*/
public static final int PEER_VALUE = 5;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static LockedResourceType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static LockedResourceType forNumber(int value) {
switch (value) {
case 1: return SERVER;
case 2: return NAMESPACE;
case 3: return TABLE;
case 4: return REGION;
case 5: return PEER;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
LockedResourceType> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap() {
public LockedResourceType findValueByNumber(int number) {
return LockedResourceType.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getEnumTypes().get(1);
}
private static final LockedResourceType[] VALUES = values();
public static LockedResourceType valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private LockedResourceType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.LockedResourceType)
}
public interface LockRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockRequest)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* required .hbase.pb.LockType lock_type = 1;
* @return Whether the lockType field is set.
*/
boolean hasLockType();
/**
* required .hbase.pb.LockType lock_type = 1;
* @return The lockType.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
* optional string namespace = 2;
* @return Whether the namespace field is set.
*/
boolean hasNamespace();
/**
* optional string namespace = 2;
* @return The namespace.
*/
java.lang.String getNamespace();
/**
* optional string namespace = 2;
* @return The bytes for namespace.
*/
org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes();
/**
* optional .hbase.pb.TableName table_name = 3;
* @return Whether the tableName field is set.
*/
boolean hasTableName();
/**
* optional .hbase.pb.TableName table_name = 3;
* @return The tableName.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
/**
* optional .hbase.pb.TableName table_name = 3;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
java.util.List
getRegionInfoList();
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
int getRegionInfoCount();
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
java.util.List
getRegionInfoOrBuilderList();
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index);
/**
* optional string description = 5;
* @return Whether the description field is set.
*/
boolean hasDescription();
/**
* optional string description = 5;
* @return The description.
*/
java.lang.String getDescription();
/**
* optional string description = 5;
* @return The bytes for description.
*/
org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getDescriptionBytes();
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return Whether the nonceGroup field is set.
*/
boolean hasNonceGroup();
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return The nonceGroup.
*/
long getNonceGroup();
/**
* optional uint64 nonce = 7 [default = 0];
* @return Whether the nonce field is set.
*/
boolean hasNonce();
/**
* optional uint64 nonce = 7 [default = 0];
* @return The nonce.
*/
long getNonce();
}
/**
* Protobuf type {@code hbase.pb.LockRequest}
*/
@javax.annotation.Generated("proto") public static final class LockRequest extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockRequest)
LockRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockRequest.newBuilder() to construct.
private LockRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
private LockRequest() {
lockType_ = 1;
namespace_ = "";
regionInfo_ = java.util.Collections.emptyList();
description_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LockRequest();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private LockRequest(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockType_ = rawValue;
}
break;
}
case 18: {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
namespace_ = bs;
break;
}
case 26: {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) != 0)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 34: {
if (!((mutable_bitField0_ & 0x00000008) != 0)) {
regionInfo_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000008;
}
regionInfo_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
break;
}
case 42: {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000008;
description_ = bs;
break;
}
case 48: {
bitField0_ |= 0x00000010;
nonceGroup_ = input.readUInt64();
break;
}
case 56: {
bitField0_ |= 0x00000020;
nonce_ = input.readUInt64();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000008) != 0)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.Builder.class);
}
private int bitField0_;
public static final int LOCK_TYPE_FIELD_NUMBER = 1;
private int lockType_;
/**
* required .hbase.pb.LockType lock_type = 1;
* @return Whether the lockType field is set.
*/
@java.lang.Override public boolean hasLockType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hbase.pb.LockType lock_type = 1;
* @return The lockType.
*/
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int NAMESPACE_FIELD_NUMBER = 2;
private volatile java.lang.Object namespace_;
/**
* optional string namespace = 2;
* @return Whether the namespace field is set.
*/
@java.lang.Override
public boolean hasNamespace() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string namespace = 2;
* @return The namespace.
*/
@java.lang.Override
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
}
}
/**
* optional string namespace = 2;
* @return The bytes for namespace.
*/
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int TABLE_NAME_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
* optional .hbase.pb.TableName table_name = 3;
* @return Whether the tableName field is set.
*/
@java.lang.Override
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hbase.pb.TableName table_name = 3;
* @return The tableName.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int REGION_INFO_FIELD_NUMBER = 4;
private java.util.List regionInfo_;
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
@java.lang.Override
public java.util.List getRegionInfoList() {
return regionInfo_;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
@java.lang.Override
public java.util.List
getRegionInfoOrBuilderList() {
return regionInfo_;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
@java.lang.Override
public int getRegionInfoCount() {
return regionInfo_.size();
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
return regionInfo_.get(index);
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
return regionInfo_.get(index);
}
public static final int DESCRIPTION_FIELD_NUMBER = 5;
private volatile java.lang.Object description_;
/**
* optional string description = 5;
* @return Whether the description field is set.
*/
@java.lang.Override
public boolean hasDescription() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string description = 5;
* @return The description.
*/
@java.lang.Override
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
}
}
/**
* optional string description = 5;
* @return The bytes for description.
*/
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int NONCE_GROUP_FIELD_NUMBER = 6;
private long nonceGroup_;
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return Whether the nonceGroup field is set.
*/
@java.lang.Override
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return The nonceGroup.
*/
@java.lang.Override
public long getNonceGroup() {
return nonceGroup_;
}
public static final int NONCE_FIELD_NUMBER = 7;
private long nonce_;
/**
* optional uint64 nonce = 7 [default = 0];
* @return Whether the nonce field is set.
*/
@java.lang.Override
public boolean hasNonce() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional uint64 nonce = 7 [default = 0];
* @return The nonce.
*/
@java.lang.Override
public long getNonce() {
return nonce_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, lockType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
output.writeMessage(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeUInt64(6, nonceGroup_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeUInt64(7, nonce_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt64Size(6, nonceGroup_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt64Size(7, nonce_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) obj;
if (hasLockType() != other.hasLockType()) return false;
if (hasLockType()) {
if (lockType_ != other.lockType_) return false;
}
if (hasNamespace() != other.hasNamespace()) return false;
if (hasNamespace()) {
if (!getNamespace()
.equals(other.getNamespace())) return false;
}
if (hasTableName() != other.hasTableName()) return false;
if (hasTableName()) {
if (!getTableName()
.equals(other.getTableName())) return false;
}
if (!getRegionInfoList()
.equals(other.getRegionInfoList())) return false;
if (hasDescription() != other.hasDescription()) return false;
if (hasDescription()) {
if (!getDescription()
.equals(other.getDescription())) return false;
}
if (hasNonceGroup() != other.hasNonceGroup()) return false;
if (hasNonceGroup()) {
if (getNonceGroup()
!= other.getNonceGroup()) return false;
}
if (hasNonce() != other.hasNonce()) return false;
if (hasNonce()) {
if (getNonce()
!= other.getNonce()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasNamespace()) {
hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
hash = (53 * hash) + getNamespace().hashCode();
}
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (getRegionInfoCount() > 0) {
hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
hash = (53 * hash) + getRegionInfoList().hashCode();
}
if (hasDescription()) {
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
}
if (hasNonceGroup()) {
hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getNonceGroup());
}
if (hasNonce()) {
hash = (37 * hash) + NONCE_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getNonce());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockRequest}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockRequest)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequestOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
getRegionInfoFieldBuilder();
}
}
// Resets every field to its proto default and clears all presence bits in bitField0_.
@java.lang.Override
public Builder clear() {
super.clear();
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (tableNameBuilder_ == null) {
tableName_ = null;
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
regionInfoBuilder_.clear();
}
description_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
nonceGroup_ = 0L;
bitField0_ = (bitField0_ & ~0x00000020);
nonce_ = 0L;
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
}
// Builds the message, throwing UninitializedMessageException if required fields are unset.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder's state into a new message without checking required fields.
// Builder presence bits are translated into message presence bits; the repeated
// region_info field occupies builder bit 0x08 but no message bit, so builder bits
// 0x10/0x20/0x40 map to message bits 0x08/0x10/0x20 below.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000004) != 0)) {
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
to_bitField0_ |= 0x00000004;
}
// Repeated field: freeze the builder's list (making it unmodifiable and clearing
// the mutability bit) so the built message and builder can share it safely.
if (regionInfoBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.regionInfo_ = regionInfo_;
} else {
result.regionInfo_ = regionInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) != 0)) {
to_bitField0_ |= 0x00000008;
}
result.description_ = description_;
if (((from_bitField0_ & 0x00000020) != 0)) {
result.nonceGroup_ = nonceGroup_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.nonce_ = nonce_;
to_bitField0_ |= 0x00000020;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
// The following reflective field mutators simply delegate to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom(LockRequest) when possible; otherwise falls
// back to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge of another LockRequest into this builder: set fields in
// `other` overwrite scalars, table_name is message-merged, and region_info entries
// are appended.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance()) return this;
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasNamespace()) {
bitField0_ |= 0x00000002;
namespace_ = other.namespace_;
onChanged();
}
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
// Repeated region_info: when no field builder exists, adopt or append to the plain
// list; otherwise feed the entries into the RepeatedFieldBuilderV3.
if (regionInfoBuilder_ == null) {
if (!other.regionInfo_.isEmpty()) {
if (regionInfo_.isEmpty()) {
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureRegionInfoIsMutable();
regionInfo_.addAll(other.regionInfo_);
}
onChanged();
}
} else {
if (!other.regionInfo_.isEmpty()) {
if (regionInfoBuilder_.isEmpty()) {
regionInfoBuilder_.dispose();
regionInfoBuilder_ = null;
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
regionInfoBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRegionInfoFieldBuilder() : null;
} else {
regionInfoBuilder_.addAllMessages(other.regionInfo_);
}
}
}
if (other.hasDescription()) {
bitField0_ |= 0x00000010;
description_ = other.description_;
onChanged();
}
if (other.hasNonceGroup()) {
setNonceGroup(other.getNonceGroup());
}
if (other.hasNonce()) {
setNonce(other.getNonce());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// A LockRequest is initialized when required lock_type is set and every nested
// table_name / region_info message is itself initialized.
@java.lang.Override
public final boolean isInitialized() {
if (!hasLockType()) {
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a LockRequest from the wire and merges it into this builder. On a parse
// failure the partially-read message (if any) is still merged before rethrowing.
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for the builder's fields (one bit per field, assigned in field order).
private int bitField0_;
// lock_type stored as its raw enum number; 1 == LockType.EXCLUSIVE, the proto default.
private int lockType_ = 1;
/**
* required .hbase.pb.LockType lock_type = 1;
* @return Whether the lockType field is set.
*/
@java.lang.Override public boolean hasLockType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hbase.pb.LockType lock_type = 1;
* @return The lockType, falling back to EXCLUSIVE for unknown stored numbers.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
* required .hbase.pb.LockType lock_type = 1;
* @param value The lockType to set; must be non-null.
* @return This builder for chaining.
*/
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hbase.pb.LockType lock_type = 1;
* @return This builder for chaining.
*/
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000001);
lockType_ = 1;
onChanged();
return this;
}
// namespace held as either a String or a ByteString; decoded lazily and cached.
private java.lang.Object namespace_ = "";
/**
* optional string namespace = 2;
* @return Whether the namespace field is set.
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string namespace = 2;
* @return The namespace. A ByteString value is decoded as UTF-8 and, when valid,
* cached back into the field.
*/
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string namespace = 2;
* @return The bytes for namespace; a String value is encoded as UTF-8 and cached.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* optional string namespace = 2;
* @param value The namespace to set; must be non-null.
* @return This builder for chaining.
*/
public Builder setNamespace(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
/**
* optional string namespace = 2;
* @return This builder for chaining.
*/
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000002);
namespace_ = getDefaultInstance().getNamespace();
onChanged();
return this;
}
/**
* optional string namespace = 2;
* @param value The bytes for namespace to set; must be non-null.
* @return This builder for chaining.
*/
public Builder setNamespaceBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
// table_name message field; either the plain message or, once a caller asks for a
// builder, a SingleFieldBuilderV3 that tracks it (only one of the two is active).
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* optional .hbase.pb.TableName table_name = 3;
* @return Whether the tableName field is set.
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hbase.pb.TableName table_name = 3;
* @return The tableName, or the default instance when unset.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
public Builder setTableName(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
* optional .hbase.pb.TableName table_name = 3;
* Merges {@code value} into any existing non-default table_name; otherwise replaces it.
*/
public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
tableName_ != null &&
tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = null;
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
* optional .hbase.pb.TableName table_name = 3;
* Marks the field present and exposes a mutable nested builder.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
}
/**
* optional .hbase.pb.TableName table_name = 3;
* Lazily creates the field builder; ownership of the value moves into it
* (tableName_ is nulled afterwards).
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
getTableName(),
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
// Repeated region_info field. The raw types in the extracted source had their
// generic parameters stripped; they are restored here to match standard protobuf
// codegen (List<RegionInfo>, Iterable<? extends RegionInfo>, etc.). Either the
// plain list or the RepeatedFieldBuilderV3 is active, never both.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
java.util.Collections.emptyList();
// Copies the (possibly shared/unmodifiable) list into a fresh ArrayList before
// the first in-place mutation; bit 0x08 records that the list is now private.
private void ensureRegionInfoIsMutable() {
if (!((bitField0_ & 0x00000008) != 0)) {
regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
bitField0_ |= 0x00000008;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
* @return An unmodifiable view of the current entries.
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
if (regionInfoBuilder_ == null) {
return java.util.Collections.unmodifiableList(regionInfo_);
} else {
return regionInfoBuilder_.getMessageList();
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public int getRegionInfoCount() {
if (regionInfoBuilder_ == null) {
return regionInfo_.size();
} else {
return regionInfoBuilder_.getCount();
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index);
} else {
return regionInfoBuilder_.getMessage(index);
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.set(index, value);
onChanged();
} else {
regionInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.set(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(value);
onChanged();
} else {
regionInfoBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(index, value);
onChanged();
} else {
regionInfoBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addAllRegionInfo(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> values) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, regionInfo_);
onChanged();
} else {
regionInfoBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder clearRegionInfo() {
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
regionInfoBuilder_.clear();
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder removeRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.remove(index);
onChanged();
} else {
regionInfoBuilder_.remove(index);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().getBuilder(index);
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index); } else {
return regionInfoBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList() {
if (regionInfoBuilder_ != null) {
return regionInfoBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(regionInfo_);
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
return getRegionInfoFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder>
getRegionInfoBuilderList() {
return getRegionInfoFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder; ownership of the list moves into it.
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoFieldBuilder() {
if (regionInfoBuilder_ == null) {
regionInfoBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
regionInfo_,
((bitField0_ & 0x00000008) != 0),
getParentForChildren(),
isClean());
regionInfo_ = null;
}
return regionInfoBuilder_;
}
// description held as either a String or a ByteString; decoded lazily and cached.
private java.lang.Object description_ = "";
/**
* optional string description = 5;
* @return Whether the description field is set.
*/
public boolean hasDescription() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string description = 5;
* @return The description. A ByteString value is decoded as UTF-8 and, when valid,
* cached back into the field.
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string description = 5;
* @return The bytes for description; a String value is encoded as UTF-8 and cached.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* optional string description = 5;
* @param value The description to set; must be non-null.
* @return This builder for chaining.
*/
public Builder setDescription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
/**
* optional string description = 5;
* @return This builder for chaining.
*/
public Builder clearDescription() {
bitField0_ = (bitField0_ & ~0x00000010);
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
* optional string description = 5;
* @param value The bytes for description to set; must be non-null.
* @return This builder for chaining.
*/
public Builder setDescriptionBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
// nonce_group: uint64 scalar, proto default 0.
private long nonceGroup_ ;
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return Whether the nonceGroup field is set.
*/
@java.lang.Override
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return The nonceGroup.
*/
@java.lang.Override
public long getNonceGroup() {
return nonceGroup_;
}
/**
* optional uint64 nonce_group = 6 [default = 0];
* @param value The nonceGroup to set.
* @return This builder for chaining.
*/
public Builder setNonceGroup(long value) {
bitField0_ |= 0x00000020;
nonceGroup_ = value;
onChanged();
return this;
}
/**
* optional uint64 nonce_group = 6 [default = 0];
* @return This builder for chaining.
*/
public Builder clearNonceGroup() {
bitField0_ = (bitField0_ & ~0x00000020);
nonceGroup_ = 0L;
onChanged();
return this;
}
// nonce: uint64 scalar, proto default 0.
private long nonce_ ;
/**
* optional uint64 nonce = 7 [default = 0];
* @return Whether the nonce field is set.
*/
@java.lang.Override
public boolean hasNonce() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional uint64 nonce = 7 [default = 0];
* @return The nonce.
*/
@java.lang.Override
public long getNonce() {
return nonce_;
}
/**
* optional uint64 nonce = 7 [default = 0];
* @param value The nonce to set.
* @return This builder for chaining.
*/
public Builder setNonce(long value) {
bitField0_ |= 0x00000040;
nonce_ = value;
onChanged();
return this;
}
/**
* optional uint64 nonce = 7 [default = 0];
* @return This builder for chaining.
*/
public Builder clearNonce() {
bitField0_ = (bitField0_ & ~0x00000040);
nonce_ = 0L;
onChanged();
return this;
}
// Unknown-field handling delegates directly to the superclass implementation.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockRequest)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockRequest)
// Singleton default instance, created eagerly in the static initializer.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. The extracted source had lost the generic type parameters
// (raw Parser/AbstractParser); they are restored here to match protobuf codegen.
// Deprecated in favor of the parser() accessor, per generated-code convention.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockRequest>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<LockRequest>() {
@java.lang.Override
public LockRequest parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new LockRequest(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockRequest> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor surface shared by LockResponse and LockResponse.Builder.
public interface LockResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockResponse)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* required uint64 proc_id = 1;
* @return Whether the procId field is set.
*/
boolean hasProcId();
/**
* required uint64 proc_id = 1;
* @return The procId.
*/
long getProcId();
}
/**
* Protobuf type {@code hbase.pb.LockResponse}
*/
@javax.annotation.Generated("proto") public static final class LockResponse extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockResponse)
LockResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockResponse.newBuilder() to construct.
// (The extracted source had lost the builder's generic parameter; restored as
// Builder<?> to match protobuf codegen.)
private LockResponse(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private LockResponse() {
}
// Serialization hook used by the protobuf runtime to create empty instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LockResponse();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Parsing constructor: reads tag/value pairs from the wire until EOF (tag 0),
// storing proc_id (field 1, varint) and preserving unrecognized fields.
private LockResponse(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
procId_ = input.readUInt64();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always attach whatever was read, even on failure, so the partial message is usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Static access to the LockResponse message descriptor.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.Builder.class);
}
// Presence bits; bit 0x01 tracks proc_id.
private int bitField0_;
public static final int PROC_ID_FIELD_NUMBER = 1;
private long procId_;
/**
* required uint64 proc_id = 1;
* @return Whether the procId field is set.
*/
@java.lang.Override
public boolean hasProcId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required uint64 proc_id = 1;
* @return The procId.
*/
@java.lang.Override
public long getProcId() {
return procId_;
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
// Initialized iff required proc_id is present; result is cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasProcId()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes proc_id (when set) followed by any preserved unknown fields.
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeUInt64(1, procId_);
}
unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size (-1 means not yet computed).
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt64Size(1, procId_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: same presence and value for proc_id, plus equal unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) obj;
if (hasProcId() != other.hasProcId()) return false;
if (hasProcId()) {
if (getProcId()
!= other.getProcId()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash consistent with equals(); mixes descriptor, set fields, and unknown fields,
// and memoizes the result (0 means not yet computed).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasProcId()) {
hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getProcId());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to PARSER
// (directly for in-memory data, via GeneratedMessageV3 helpers for streams so that
// IOExceptions are propagated rather than wrapped).
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builders are derived from DEFAULT_INSTANCE so
// that toBuilder() can short-circuit for the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.LockResponse}
 *
 * Generated builder for {@code LockResponse}; holds the single required
 * {@code proc_id} field behind bit 0 of {@code bitField0_}.
 */
// FIX(review): the web-scraped copy of this generated file lost all HTML-escaped
// generic type parameters. Restored the <Builder> type argument on the superclass,
// matching the original protoc output; all other code is unchanged.
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockResponse)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponseOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets proc_id to its default (0) and clears its has-bit.
@java.lang.Override
public Builder clear() {
super.clear();
procId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
}
// Builds and validates; throws if the required proc_id field is unset.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without initialization checks, copying set fields and their has-bits.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.procId_ = procId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance()) return this;
if (other.hasProcId()) {
setProcId(other.getProcId());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasProcId()) {
return false;
}
return true;
}
// Parses from a stream, merging as much as was read even on parse failure.
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private long procId_ ;
/**
 * required uint64 proc_id = 1;
 * @return Whether the procId field is set.
 */
@java.lang.Override
public boolean hasProcId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required uint64 proc_id = 1;
 * @return The procId.
 */
@java.lang.Override
public long getProcId() {
return procId_;
}
/**
 * required uint64 proc_id = 1;
 * @param value The procId to set.
 * @return This builder for chaining.
 */
public Builder setProcId(long value) {
bitField0_ |= 0x00000001;
procId_ = value;
onChanged();
return this;
}
/**
 * required uint64 proc_id = 1;
 * @return This builder for chaining.
 */
public Builder clearProcId() {
bitField0_ = (bitField0_ & ~0x00000001);
procId_ = 0L;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockResponse)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockResponse)
// FIX(review): restored the <LockResponse> type arguments on Parser/AbstractParser
// that were stripped when this generated file was scraped as HTML; behavior and
// bytecode-level API are identical to the original protoc output.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockResponse>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<LockResponse>() {
@java.lang.Override
public LockResponse parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new LockResponse(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockResponse> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface for hbase.pb.LockHeartbeatRequest, implemented by
// both the immutable message and its Builder.
public interface LockHeartbeatRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockHeartbeatRequest)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
 * required uint64 proc_id = 1;
 * @return Whether the procId field is set.
 */
boolean hasProcId();
/**
 * required uint64 proc_id = 1;
 * @return The procId.
 */
long getProcId();
/**
 * optional bool keep_alive = 2 [default = true];
 * @return Whether the keepAlive field is set.
 */
boolean hasKeepAlive();
/**
 * optional bool keep_alive = 2 [default = true];
 * @return The keepAlive.
 */
boolean getKeepAlive();
}
/**
 * Protobuf type {@code hbase.pb.LockHeartbeatRequest}
 */
// FIX(review): restored the wildcard type argument on the GeneratedMessageV3.Builder
// constructor parameter (lost when this generated file was scraped as HTML);
// matches the original protoc output. No behavior change.
@javax.annotation.Generated("proto") public static final class LockHeartbeatRequest extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockHeartbeatRequest)
LockHeartbeatRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockHeartbeatRequest.newBuilder() to construct.
private LockHeartbeatRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default constructor: keep_alive defaults to true per the .proto definition.
private LockHeartbeatRequest() {
keepAlive_ = true;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LockHeartbeatRequest();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), storing
// proc_id (field 1, varint) and keep_alive (field 2, bool) and preserving
// unrecognized fields in unknownFields.
private LockHeartbeatRequest(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// tag 8 = field 1, wire type 0 (varint): proc_id
case 8: {
bitField0_ |= 0x00000001;
procId_ = input.readUInt64();
break;
}
// tag 16 = field 2, wire type 0 (varint): keep_alive
case 16: {
bitField0_ |= 0x00000002;
keepAlive_ = input.readBool();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always finalize unknown fields, even when parsing failed part-way.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.Builder.class);
}
// Presence bits: bit 0 = proc_id, bit 1 = keep_alive.
private int bitField0_;
public static final int PROC_ID_FIELD_NUMBER = 1;
private long procId_;
/**
 * required uint64 proc_id = 1;
 * @return Whether the procId field is set.
 */
@java.lang.Override
public boolean hasProcId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required uint64 proc_id = 1;
 * @return The procId.
 */
@java.lang.Override
public long getProcId() {
return procId_;
}
public static final int KEEP_ALIVE_FIELD_NUMBER = 2;
private boolean keepAlive_;
/**
 * optional bool keep_alive = 2 [default = true];
 * @return Whether the keepAlive field is set.
 */
@java.lang.Override
public boolean hasKeepAlive() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional bool keep_alive = 2 [default = true];
 * @return The keepAlive.
 */
@java.lang.Override
public boolean getKeepAlive() {
return keepAlive_;
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// proc_id is required; keep_alive is optional and not checked.
if (!hasProcId()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeUInt64(1, procId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeBool(2, keepAlive_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size of this message.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt64Size(1, procId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeBoolSize(2, keepAlive_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over both fields (presence and value) plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest) obj;
if (hasProcId() != other.hasProcId()) return false;
if (hasProcId()) {
if (getProcId()
!= other.getProcId()) return false;
}
if (hasKeepAlive() != other.hasKeepAlive()) return false;
if (hasKeepAlive()) {
if (getKeepAlive()
!= other.getKeepAlive()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash code consistent with equals(); multipliers fixed by the protobuf generator.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasProcId()) {
hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getProcId());
}
if (hasKeepAlive()) {
hash = (37 * hash) + KEEP_ALIVE_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean(
getKeepAlive());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to PARSER
// (directly for in-memory data, via GeneratedMessageV3 helpers for streams).
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builders are derived from DEFAULT_INSTANCE so
// that toBuilder() can short-circuit for the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockHeartbeatRequest}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockHeartbeatRequest)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequestOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
procId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
keepAlive_ = true;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.procId_ = procId_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.keepAlive_ = keepAlive_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance()) return this;
if (other.hasProcId()) {
setProcId(other.getProcId());
}
if (other.hasKeepAlive()) {
setKeepAlive(other.getKeepAlive());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasProcId()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private long procId_ ;
/**
* required uint64 proc_id = 1;
* @return Whether the procId field is set.
*/
@java.lang.Override
public boolean hasProcId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required uint64 proc_id = 1;
* @return The procId.
*/
@java.lang.Override
public long getProcId() {
return procId_;
}
/**
* required uint64 proc_id = 1;
* @param value The procId to set.
* @return This builder for chaining.
*/
public Builder setProcId(long value) {
bitField0_ |= 0x00000001;
procId_ = value;
onChanged();
return this;
}
/**
* required uint64 proc_id = 1;
* @return This builder for chaining.
*/
public Builder clearProcId() {
bitField0_ = (bitField0_ & ~0x00000001);
procId_ = 0L;
onChanged();
return this;
}
// Proto default for keep_alive is true, so the field initializer matches it.
private boolean keepAlive_ = true;
/**
 * <code>optional bool keep_alive = 2 [default = true];</code>
 * @return Whether the keepAlive field is set.
 */
@java.lang.Override
public boolean hasKeepAlive() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <code>optional bool keep_alive = 2 [default = true];</code>
 * @return The keepAlive.
 */
@java.lang.Override
public boolean getKeepAlive() {
return keepAlive_;
}
/**
 * <code>optional bool keep_alive = 2 [default = true];</code>
 * @param value The keepAlive to set.
 * @return This builder for chaining.
 */
public Builder setKeepAlive(boolean value) {
bitField0_ |= 0x00000002;
keepAlive_ = value;
onChanged();
return this;
}
/**
 * Clears keep_alive back to its proto default (true) and marks it unset.
 *
 * <code>optional bool keep_alive = 2 [default = true];</code>
 * @return This builder for chaining.
 */
public Builder clearKeepAlive() {
bitField0_ = (bitField0_ & ~0x00000002);
keepAlive_ = true;
onChanged();
return this;
}
// Unknown-field handling is delegated entirely to the superclass; these
// overrides exist only to narrow the return type to this Builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockHeartbeatRequest)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockHeartbeatRequest)
// Singleton default instance: all fields unset / at proto defaults.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser singleton for {@code LockHeartbeatRequest}.
 * @deprecated Kept public for generated-code compatibility; use
 * {@link #parser()} instead.
 */
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockHeartbeatRequest>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<LockHeartbeatRequest>() {
@java.lang.Override
public LockHeartbeatRequest parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
// Delegates to the wire-format parsing constructor.
return new LockHeartbeatRequest(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockHeartbeatRequest> parser() {
return PARSER;
}
// Restored the <LockHeartbeatRequest> type argument lost from the raw return type.
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockHeartbeatRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface LockHeartbeatResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockHeartbeatResponse)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return Whether the lockStatus field is set.
 */
boolean hasLockStatus();
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return The lockStatus.
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus getLockStatus();
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return Whether the timeoutMs field is set.
 */
boolean hasTimeoutMs();
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return The timeoutMs.
 */
int getTimeoutMs();
}
/**
 * Protobuf type {@code hbase.pb.LockHeartbeatResponse}
 *
 * Heartbeat reply: reports the current lock status (UNLOCKED/LOCKED) and,
 * per the proto comment on timeout_ms, the timeout of the lock (if locked).
 */
@javax.annotation.Generated("proto") public static final class LockHeartbeatResponse extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockHeartbeatResponse)
LockHeartbeatResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockHeartbeatResponse.newBuilder() to construct.
private LockHeartbeatResponse(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// lock_status defaults to 1 (UNLOCKED).
private LockHeartbeatResponse() {
lockStatus_ = 1;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LockHeartbeatResponse();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of stream
// (tag 0) or an unknown field terminates parsing.
private LockHeartbeatResponse(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: { // field 1 (lock_status), varint-encoded enum
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.valueOf(rawValue);
if (value == null) {
// Unrecognized enum number: keep it in unknown fields instead of dropping it.
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockStatus_ = rawValue;
}
break;
}
case 16: { // field 2 (timeout_ms), varint
bitField0_ |= 0x00000002;
timeoutMs_ = input.readUInt32();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.Builder.class);
}
/**
 * Protobuf enum {@code hbase.pb.LockHeartbeatResponse.LockStatus}
 */
public enum LockStatus
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>UNLOCKED = 1;</code>
 */
UNLOCKED(1),
/**
 * <code>LOCKED = 2;</code>
 */
LOCKED(2),
;
/**
 * <code>UNLOCKED = 1;</code>
 */
public static final int UNLOCKED_VALUE = 1;
/**
 * <code>LOCKED = 2;</code>
 */
public static final int LOCKED_VALUE = 2;
public final int getNumber() {
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static LockStatus valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or null
 * when the number does not correspond to a known entry.
 */
public static LockStatus forNumber(int value) {
switch (value) {
case 1: return UNLOCKED;
case 2: return LOCKED;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<LockStatus>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
LockStatus> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<LockStatus>() {
public LockStatus findValueByNumber(int number) {
return LockStatus.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDescriptor().getEnumTypes().get(0);
}
private static final LockStatus[] VALUES = values();
public static LockStatus valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private LockStatus(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.LockHeartbeatResponse.LockStatus)
}
// Bit 0 = lock_status set, bit 1 = timeout_ms set.
private int bitField0_;
public static final int LOCK_STATUS_FIELD_NUMBER = 1;
private int lockStatus_;
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return Whether the lockStatus field is set.
 */
@java.lang.Override public boolean hasLockStatus() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return The lockStatus (UNLOCKED when the stored number is unrecognized).
 */
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus getLockStatus() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.valueOf(lockStatus_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.UNLOCKED : result;
}
public static final int TIMEOUT_MS_FIELD_NUMBER = 2;
private int timeoutMs_;
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return Whether the timeoutMs field is set.
 */
@java.lang.Override
public boolean hasTimeoutMs() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return The timeoutMs.
 */
@java.lang.Override
public int getTimeoutMs() {
return timeoutMs_;
}
// Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// lock_status is a required field; the message is invalid without it.
if (!hasLockStatus()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Only explicitly-set fields are serialized (proto2 presence semantics).
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, lockStatus_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeUInt32(2, timeoutMs_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockStatus_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt32Size(2, timeoutMs_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) obj;
// Two messages are equal when field presence, field values, and unknown
// fields all match.
if (hasLockStatus() != other.hasLockStatus()) return false;
if (hasLockStatus()) {
if (lockStatus_ != other.lockStatus_) return false;
}
if (hasTimeoutMs() != other.hasTimeoutMs()) return false;
if (hasTimeoutMs()) {
if (getTimeoutMs()
!= other.getTimeoutMs()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockStatus()) {
hash = (37 * hash) + LOCK_STATUS_FIELD_NUMBER;
hash = (53 * hash) + lockStatus_;
}
if (hasTimeoutMs()) {
hash = (37 * hash) + TIMEOUT_MS_FIELD_NUMBER;
hash = (53 * hash) + getTimeoutMs();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Builder for {@code hbase.pb.LockHeartbeatResponse}.
 *
 * Protobuf type {@code hbase.pb.LockHeartbeatResponse}
 */
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockHeartbeatResponse)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponseOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset to proto defaults: lock_status = 1 (UNLOCKED), timeout_ms = 0,
// and clear both presence bits.
lockStatus_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
timeoutMs_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse buildPartial() {
// Copies field values and presence bits into the new message without
// enforcing required-field initialization (unlike build()).
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.lockStatus_ = lockStatus_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.timeoutMs_ = timeoutMs_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse other) {
// Field-wise merge: only fields set on 'other' overwrite this builder.
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance()) return this;
if (other.hasLockStatus()) {
setLockStatus(other.getLockStatus());
}
if (other.hasTimeoutMs()) {
setTimeoutMs(other.getTimeoutMs());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// lock_status is required.
if (!hasLockStatus()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Parse from the stream and merge; on failure the partially-parsed
// message is still merged (finally block) before rethrowing.
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int lockStatus_ = 1;
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return Whether the lockStatus field is set.
 */
@java.lang.Override public boolean hasLockStatus() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return The lockStatus (UNLOCKED when the stored number is unrecognized).
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus getLockStatus() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.valueOf(lockStatus_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.UNLOCKED : result;
}
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @param value The lockStatus to set.
 * @return This builder for chaining.
 */
public Builder setLockStatus(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockStatus_ = value.getNumber();
onChanged();
return this;
}
/**
 * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
 * @return This builder for chaining.
 */
public Builder clearLockStatus() {
bitField0_ = (bitField0_ & ~0x00000001);
lockStatus_ = 1;
onChanged();
return this;
}
private int timeoutMs_ ;
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return Whether the timeoutMs field is set.
 */
@java.lang.Override
public boolean hasTimeoutMs() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return The timeoutMs.
 */
@java.lang.Override
public int getTimeoutMs() {
return timeoutMs_;
}
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @param value The timeoutMs to set.
 * @return This builder for chaining.
 */
public Builder setTimeoutMs(int value) {
bitField0_ |= 0x00000002;
timeoutMs_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * Timeout of lock (if locked).
 * </pre>
 *
 * <code>optional uint32 timeout_ms = 2;</code>
 * @return This builder for chaining.
 */
public Builder clearTimeoutMs() {
bitField0_ = (bitField0_ & ~0x00000002);
timeoutMs_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockHeartbeatResponse)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockHeartbeatResponse)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * @deprecated Kept public for generated-code compatibility; use
 * {@link #parser()} instead.
 */
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockHeartbeatResponse>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<LockHeartbeatResponse>() {
@java.lang.Override
public LockHeartbeatResponse parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new LockHeartbeatResponse(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockHeartbeatResponse> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockHeartbeatResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Restored the List type arguments on getRegionInfoList()/getRegionInfoOrBuilderList(),
// which had been reduced to raw java.util.List.
public interface LockProcedureDataOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockProcedureData)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 * @return Whether the lockType field is set.
 */
boolean hasLockType();
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 * @return The lockType.
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
 * <code>optional string namespace = 2;</code>
 * @return Whether the namespace field is set.
 */
boolean hasNamespace();
/**
 * <code>optional string namespace = 2;</code>
 * @return The namespace.
 */
java.lang.String getNamespace();
/**
 * <code>optional string namespace = 2;</code>
 * @return The bytes for namespace.
 */
org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes();
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 * @return Whether the tableName field is set.
 */
boolean hasTableName();
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 * @return The tableName.
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>
getRegionInfoList();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
int getRegionInfoCount();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index);
/**
 * <code>optional string description = 5;</code>
 * @return Whether the description field is set.
 */
boolean hasDescription();
/**
 * <code>optional string description = 5;</code>
 * @return The description.
 */
java.lang.String getDescription();
/**
 * <code>optional string description = 5;</code>
 * @return The bytes for description.
 */
org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getDescriptionBytes();
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 * @return Whether the isMasterLock field is set.
 */
boolean hasIsMasterLock();
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 * @return The isMasterLock.
 */
boolean getIsMasterLock();
}
/**
* Protobuf type {@code hbase.pb.LockProcedureData}
*/
@javax.annotation.Generated("proto") public static final class LockProcedureData extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockProcedureData)
LockProcedureDataOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockProcedureData.newBuilder() to construct.
// Restored the Builder<?> wildcard that had been reduced to a raw type.
private LockProcedureData(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: lock_type defaults to 1, strings to empty,
// and the repeated region_info field to an immutable empty list.
private LockProcedureData() {
lockType_ = 1;
namespace_ = "";
regionInfo_ = java.util.Collections.emptyList();
description_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LockProcedureData();
}
// Exposes fields that were on the wire but are not in this message's schema.
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Legacy-style stream-parsing constructor generated by protoc. Reads tagged
 * fields until end-of-stream (tag 0), preserving unrecognized fields in the
 * unknown-field set. On a malformed stream the partially parsed message is
 * attached to the thrown InvalidProtocolBufferException.
 */
private LockProcedureData(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 signals end of input.
done = true;
break;
case 8: {
// Field 1: lock_type (enum). Unrecognized enum numbers are kept as
// unknown varint data rather than dropped.
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockType_ = rawValue;
}
break;
}
case 18: {
// Field 2: namespace (string, stored lazily as ByteString).
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
namespace_ = bs;
break;
}
case 26: {
// Field 3: table_name (message). A repeated occurrence is merged into
// the previously parsed value, per proto2 last-field-wins merge rules.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) != 0)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 34: {
// Field 4: region_info (repeated message). The list is created lazily on
// first element; mutable_bitField0_ tracks that it needs to be frozen below.
if (!((mutable_bitField0_ & 0x00000008) != 0)) {
regionInfo_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000008;
}
regionInfo_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
break;
}
case 42: {
// Field 5: description (string, stored lazily as ByteString).
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000008;
description_ = bs;
break;
}
case 48: {
// Field 6: is_master_lock (bool).
bitField0_ |= 0x00000010;
isMasterLock_ = input.readBool();
break;
}
default: {
// Anything else goes to the unknown-field set; a false return means
// we hit an end-group tag and must stop.
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field and attach unknown fields even on failure,
// so the unfinished message handed to the caller is consistent.
if (((mutable_bitField0_ & 0x00000008) != 0)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor for hbase.pb.LockProcedureData, resolved from the file-level
// descriptor tables defined elsewhere in LockServiceProtos.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.Builder.class);
}
// Has-bits: 0x01 lock_type, 0x02 namespace, 0x04 table_name,
// 0x08 description, 0x10 is_master_lock (region_info is repeated, no bit).
private int bitField0_;
public static final int LOCK_TYPE_FIELD_NUMBER = 1;
// Stored as the raw enum wire number, not the enum constant.
private int lockType_;
/**
 * required .hbase.pb.LockType lock_type = 1;
 * @return Whether the lockType field is set.
 */
@java.lang.Override public boolean hasLockType() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required .hbase.pb.LockType lock_type = 1;
 * @return The lockType. Falls back to EXCLUSIVE if the stored number is
 * not a known enum value.
 */
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int NAMESPACE_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; decoded lazily and cached on first
// access (volatile for safe publication of the cached decode).
private volatile java.lang.Object namespace_;
/**
 * optional string namespace = 2;
 * @return Whether the namespace field is set.
 */
@java.lang.Override
public boolean hasNamespace() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional string namespace = 2;
 * @return The namespace.
 */
@java.lang.Override
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only if the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
}
}
/**
 * optional string namespace = 2;
 * @return The bytes for namespace.
 */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded form for subsequent calls.
namespace_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int TABLE_NAME_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
 * optional .hbase.pb.TableName table_name = 3;
 * @return Whether the tableName field is set.
 */
@java.lang.Override
public boolean hasTableName() {
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 * @return The tableName, or the default instance if unset (never null).
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int REGION_INFO_FIELD_NUMBER = 4;
// NOTE(review): generic type parameters (List<RegionInfo>, etc.) appear to have
// been stripped from this listing by HTML extraction; the raw types below are
// kept byte-identical to the listing — confirm against the real generated file.
private java.util.List regionInfo_;
/**
 * repeated .hbase.pb.RegionInfo region_info = 4;
 * Unmodifiable once the message is built (frozen by the parsing constructor
 * or buildPartial()).
 */
@java.lang.Override
public java.util.List getRegionInfoList() {
return regionInfo_;
}
/**
 * repeated .hbase.pb.RegionInfo region_info = 4;
 */
@java.lang.Override
public java.util.List
getRegionInfoOrBuilderList() {
return regionInfo_;
}
/**
 * repeated .hbase.pb.RegionInfo region_info = 4;
 */
@java.lang.Override
public int getRegionInfoCount() {
return regionInfo_.size();
}
/**
 * repeated .hbase.pb.RegionInfo region_info = 4;
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
return regionInfo_.get(index);
}
/**
 * repeated .hbase.pb.RegionInfo region_info = 4;
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
return regionInfo_.get(index);
}
public static final int DESCRIPTION_FIELD_NUMBER = 5;
// Lazily decoded String/ByteString pair, same scheme as namespace_.
private volatile java.lang.Object description_;
/**
 * optional string description = 5;
 * @return Whether the description field is set.
 */
@java.lang.Override
public boolean hasDescription() {
  return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional string description = 5;
 * @return The description.
 */
@java.lang.Override
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decode only when valid UTF-8.
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
}
}
/**
 * optional string description = 5;
 * @return The bytes for description.
 */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int IS_MASTER_LOCK_FIELD_NUMBER = 6;
private boolean isMasterLock_;
/**
 * optional bool is_master_lock = 6 [default = false];
 * @return Whether the isMasterLock field is set.
 */
@java.lang.Override
public boolean hasIsMasterLock() {
  return ((bitField0_ & 0x00000010) != 0);
}
/**
 * optional bool is_master_lock = 6 [default = false];
 * @return The isMasterLock.
 */
@java.lang.Override
public boolean getIsMasterLock() {
  return isMasterLock_;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
/**
 * A message is initialized when the required lock_type is set and every
 * contained table_name / region_info message is itself initialized.
 */
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes set fields in field-number order, then any unknown fields.
 */
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, lockType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
output.writeMessage(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeBool(6, isMasterLock_);
}
unknownFields.writeTo(output);
}
/**
 * Computes and memoizes the serialized byte size; must mirror writeTo() exactly.
 */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeBoolSize(6, isMasterLock_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality: each optional/required field must agree on both
 * presence and value; repeated region_info and unknown fields compare as lists.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData) obj;
if (hasLockType() != other.hasLockType()) return false;
if (hasLockType()) {
if (lockType_ != other.lockType_) return false;
}
if (hasNamespace() != other.hasNamespace()) return false;
if (hasNamespace()) {
if (!getNamespace()
.equals(other.getNamespace())) return false;
}
if (hasTableName() != other.hasTableName()) return false;
if (hasTableName()) {
if (!getTableName()
.equals(other.getTableName())) return false;
}
if (!getRegionInfoList()
.equals(other.getRegionInfoList())) return false;
if (hasDescription() != other.hasDescription()) return false;
if (hasDescription()) {
if (!getDescription()
.equals(other.getDescription())) return false;
}
if (hasIsMasterLock() != other.hasIsMasterLock()) return false;
if (hasIsMasterLock()) {
if (getIsMasterLock()
!= other.getIsMasterLock()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
/**
 * Memoized hash consistent with equals(): mixes only the fields that are set,
 * keyed by their field numbers, plus the descriptor and unknown fields.
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasNamespace()) {
hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
hash = (53 * hash) + getNamespace().hashCode();
}
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (getRegionInfoCount() > 0) {
hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
hash = (53 * hash) + getRegionInfoList().hashCode();
}
if (hasDescription()) {
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
}
if (hasIsMasterLock()) {
hash = (37 * hash) + IS_MASTER_LOCK_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean(
getIsMasterLock());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// ---- Standard generated parseFrom overloads. Byte-array/ByteString/ByteBuffer
// variants delegate to PARSER directly; stream variants go through the
// GeneratedMessageV3 helpers, which re-wrap IO errors appropriately. ----
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Length-delimited variants: a varint size prefix precedes the message bytes.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// ---- Builder factory methods. toBuilder() on the default instance avoids an
// unnecessary mergeFrom of all-default fields. ----
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockProcedureData}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockProcedureData)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureDataOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is true for nested-builder-backed messages).
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
getRegionInfoFieldBuilder();
}
}
/**
 * Resets every field to its default and clears all has-bits.
 * Builder bit layout differs from the message: 0x01 lock_type, 0x02 namespace,
 * 0x04 table_name, 0x08 region_info (mutable-list marker), 0x10 description,
 * 0x20 is_master_lock.
 */
@java.lang.Override
public Builder clear() {
super.clear();
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (tableNameBuilder_ == null) {
tableName_ = null;
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
regionInfoBuilder_.clear();
}
description_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
isMasterLock_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.getDefaultInstance();
}
/**
 * Builds and validates; throws if the required lock_type (or a nested
 * message's required field) is missing.
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds without initialization checks, remapping builder has-bits to the
 * message's bit layout (builder 0x10 -> message 0x08 for description,
 * builder 0x20 -> message 0x10 for is_master_lock); the repeated region_info
 * list is frozen (made unmodifiable) and handed to the message.
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000004) != 0)) {
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
to_bitField0_ |= 0x00000004;
}
if (regionInfoBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.regionInfo_ = regionInfo_;
} else {
result.regionInfo_ = regionInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) != 0)) {
to_bitField0_ |= 0x00000008;
}
result.description_ = description_;
if (((from_bitField0_ & 0x00000020) != 0)) {
result.isMasterLock_ = isMasterLock_;
to_bitField0_ |= 0x00000010;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// ---- Reflection-based operations: straight delegation to the superclass. ----
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, else merges reflectively.
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Proto2 merge semantics: scalar fields set in {@code other} overwrite this
 * builder's values, nested table_name is recursively merged, and region_info
 * elements are appended.
 */
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.getDefaultInstance()) return this;
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasNamespace()) {
bitField0_ |= 0x00000002;
namespace_ = other.namespace_;
onChanged();
}
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
if (regionInfoBuilder_ == null) {
if (!other.regionInfo_.isEmpty()) {
if (regionInfo_.isEmpty()) {
// Share other's immutable list until this builder needs to mutate it.
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureRegionInfoIsMutable();
regionInfo_.addAll(other.regionInfo_);
}
onChanged();
}
} else {
if (!other.regionInfo_.isEmpty()) {
if (regionInfoBuilder_.isEmpty()) {
// Discard the empty field builder and adopt other's list directly.
regionInfoBuilder_.dispose();
regionInfoBuilder_ = null;
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
regionInfoBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRegionInfoFieldBuilder() : null;
} else {
regionInfoBuilder_.addAllMessages(other.regionInfo_);
}
}
}
if (other.hasDescription()) {
bitField0_ |= 0x00000010;
description_ = other.description_;
onChanged();
}
if (other.hasIsMasterLock()) {
setIsMasterLock(other.getIsMasterLock());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Same rules as the message's isInitialized(), without memoization.
@java.lang.Override
public final boolean isInitialized() {
if (!hasLockType()) {
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
return false;
}
}
return true;
}
/**
 * Parses from a stream into this builder. On failure, whatever was parsed
 * before the error is still merged in (see finally) before rethrowing as an
 * unwrapped IOException.
 */
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder has-bits; layout documented on clear().
private int bitField0_;
// Raw enum number; default 1 = EXCLUSIVE.
private int lockType_ = 1;
/**
 * required .hbase.pb.LockType lock_type = 1;
 * @return Whether the lockType field is set.
 */
@java.lang.Override public boolean hasLockType() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required .hbase.pb.LockType lock_type = 1;
 * @return The lockType (EXCLUSIVE if the stored number is unrecognized).
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
 * required .hbase.pb.LockType lock_type = 1;
 * @param value The lockType to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
 * required .hbase.pb.LockType lock_type = 1;
 * @return This builder for chaining.
 */
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000001);
lockType_ = 1;
onChanged();
return this;
}
// String/ByteString pair, decoded lazily (same scheme as the message field).
private java.lang.Object namespace_ = "";
/**
 * optional string namespace = 2;
 * @return Whether the namespace field is set.
 */
public boolean hasNamespace() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional string namespace = 2;
 * @return The namespace.
 */
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decode only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * optional string namespace = 2;
 * @return The bytes for namespace.
 */
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
 * optional string namespace = 2;
 * @param value The namespace to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setNamespace(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
/**
 * optional string namespace = 2;
 * @return This builder for chaining.
 */
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000002);
namespace_ = getDefaultInstance().getNamespace();
onChanged();
return this;
}
/**
 * optional string namespace = 2;
 * @param value The bytes for namespace to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setNamespaceBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
// table_name is held either directly (tableName_) or via the lazily created
// single-field builder (tableNameBuilder_); exactly one is authoritative at
// any time — once the builder exists, tableName_ is no longer consulted.
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
 * optional .hbase.pb.TableName table_name = 3;
 * @return Whether the tableName field is set.
 */
public boolean hasTableName() {
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 * @return The tableName, or the default instance if unset (never null).
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 * Replaces any existing value; value must be non-null.
 */
public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 */
public Builder setTableName(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 * Merges field-by-field into any existing value; if unset (or still the
 * default instance), simply adopts {@code value}.
 */
public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
tableName_ != null &&
tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 */
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = null;
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * optional .hbase.pb.TableName table_name = 3;
 * Marks the field set and returns a mutable nested builder for it.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
}
/**
* optional .hbase.pb.TableName table_name = 3;
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
getTableName(),
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
// Repeated message field region_info (field 4). Backed either by the mutable
// regionInfo_ list or by the lazily created regionInfoBuilder_, never both.
// Generic type parameters restored below: the extracted text had raw types
// (List, ArrayList, Iterable) where generated protobuf code declares them.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
java.util.Collections.emptyList();
// Copy-on-write: materialize a private mutable copy before the first mutation;
// bit 0x00000008 records that the list is owned by this builder.
private void ensureRegionInfoIsMutable() {
if (!((bitField0_ & 0x00000008) != 0)) {
regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
bitField0_ |= 0x00000008;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
* @return An unmodifiable view of the current elements.
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
if (regionInfoBuilder_ == null) {
return java.util.Collections.unmodifiableList(regionInfo_);
} else {
return regionInfoBuilder_.getMessageList();
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
* @return The number of elements.
*/
public int getRegionInfoCount() {
if (regionInfoBuilder_ == null) {
return regionInfo_.size();
} else {
return regionInfoBuilder_.getCount();
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
* @param index Zero-based element index.
* @return The element at {@code index}.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index);
} else {
return regionInfoBuilder_.getMessage(index);
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.set(index, value);
onChanged();
} else {
regionInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.set(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(value);
onChanged();
} else {
regionInfoBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(index, value);
onChanged();
} else {
regionInfoBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder addAllRegionInfo(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> values) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, regionInfo_);
onChanged();
} else {
regionInfoBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder clearRegionInfo() {
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
regionInfoBuilder_.clear();
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public Builder removeRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.remove(index);
onChanged();
} else {
regionInfoBuilder_.remove(index);
}
return this;
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().getBuilder(index);
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index); } else {
return regionInfoBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList() {
if (regionInfoBuilder_ != null) {
return regionInfoBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(regionInfo_);
}
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
return getRegionInfoFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
* repeated .hbase.pb.RegionInfo region_info = 4;
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder>
getRegionInfoBuilderList() {
return getRegionInfoFieldBuilder().getBuilderList();
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoFieldBuilder() {
if (regionInfoBuilder_ == null) {
// Ownership of the list moves into the builder; the inline list is nulled.
regionInfoBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
regionInfo_,
((bitField0_ & 0x00000008) != 0),
getParentForChildren(),
isClean());
regionInfo_ = null;
}
return regionInfoBuilder_;
}
// Field description (field 5): holds either a String or a ByteString and is
// lazily decoded/encoded on access, per the generated-string-field pattern.
private java.lang.Object description_ = "";
/**
* optional string description = 5;
* @return Whether the description field is set.
*/
public boolean hasDescription() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string description = 5;
* @return The description.
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8, so that
// malformed bytes are preserved for round-tripping.
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string description = 5;
* @return The bytes for description.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded form so repeated byte accesses avoid re-encoding.
description_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* optional string description = 5;
* @param value The description to set.
* @return This builder for chaining.
*/
public Builder setDescription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
/**
* optional string description = 5;
* @return This builder for chaining.
*/
public Builder clearDescription() {
bitField0_ = (bitField0_ & ~0x00000010);
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
* optional string description = 5;
* @param value The bytes for description to set.
* @return This builder for chaining.
*/
public Builder setDescriptionBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
// Stored as raw bytes; no UTF-8 validation is performed here (proto2 behavior).
description_ = value;
onChanged();
return this;
}
// Field is_master_lock (field 6); presence tracked by bit 0x00000020.
private boolean isMasterLock_ ;
/**
* optional bool is_master_lock = 6 [default = false];
* @return Whether the isMasterLock field is set.
*/
@java.lang.Override
public boolean hasIsMasterLock() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional bool is_master_lock = 6 [default = false];
* @return The isMasterLock.
*/
@java.lang.Override
public boolean getIsMasterLock() {
return isMasterLock_;
}
/**
* optional bool is_master_lock = 6 [default = false];
* @param value The isMasterLock to set.
* @return This builder for chaining.
*/
public Builder setIsMasterLock(boolean value) {
bitField0_ |= 0x00000020;
isMasterLock_ = value;
onChanged();
return this;
}
/**
* optional bool is_master_lock = 6 [default = false];
* @return This builder for chaining.
*/
public Builder clearIsMasterLock() {
bitField0_ = (bitField0_ & ~0x00000020);
isMasterLock_ = false;
onChanged();
return this;
}
// Straight delegations to GeneratedMessageV3.Builder; present so the generated
// builder exposes a covariant return type (this Builder) for method chaining.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockProcedureData)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockProcedureData)
// Singleton default instance, created eagerly in a static initializer.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser<LockProcedureData> type arguments restored; the extracted text had the
// raw Parser/AbstractParser types, which generated protobuf code never uses.
// Deprecated in favor of the parser() accessor, but kept for compatibility.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockProcedureData>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<LockProcedureData>() {
@java.lang.Override
public LockProcedureData parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new LockProcedureData(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockProcedureData> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockProcedureData> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/**
* Read-only accessor contract for hbase.pb.LockedResource, implemented by both
* the immutable message and its Builder. List return types restored to their
* generic forms (the extracted text had raw java.util.List).
*/
public interface LockedResourceOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockedResource)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return Whether the resourceType field is set.
*/
boolean hasResourceType();
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return The resourceType.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType getResourceType();
/**
* optional string resource_name = 2;
* @return Whether the resourceName field is set.
*/
boolean hasResourceName();
/**
* optional string resource_name = 2;
* @return The resourceName.
*/
java.lang.String getResourceName();
/**
* optional string resource_name = 2;
* @return The bytes for resourceName.
*/
org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getResourceNameBytes();
/**
* required .hbase.pb.LockType lock_type = 3;
* @return Whether the lockType field is set.
*/
boolean hasLockType();
/**
* required .hbase.pb.LockType lock_type = 3;
* @return The lockType.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* @return Whether the exclusiveLockOwnerProcedure field is set.
*/
boolean hasExclusiveLockOwnerProcedure();
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* @return The exclusiveLockOwnerProcedure.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getExclusiveLockOwnerProcedure();
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getExclusiveLockOwnerProcedureOrBuilder();
/**
* optional int32 shared_lock_count = 5;
* @return Whether the sharedLockCount field is set.
*/
boolean hasSharedLockCount();
/**
* optional int32 shared_lock_count = 5;
* @return The sharedLockCount.
*/
int getSharedLockCount();
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure>
getWaitingProceduresList();
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getWaitingProcedures(int index);
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
int getWaitingProceduresCount();
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
getWaitingProceduresOrBuilderList();
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getWaitingProceduresOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.LockedResource}
*/
@javax.annotation.Generated("proto") public static final class LockedResource extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockedResource)
LockedResourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockedResource.newBuilder() to construct.
// Wildcard type argument restored on the Builder parameter (raw type in the
// extracted text); this is the standard generated-message constructor shape.
private LockedResource(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes fields to their proto2 defaults
// (first enum value = wire number 1, empty string, empty list).
private LockedResource() {
resourceType_ = 1;
resourceName_ = "";
lockType_ = 1;
waitingProcedures_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LockedResource();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Tag values: 8 = resource_type (enum),
// 18 = resource_name (bytes), 24 = lock_type (enum), 34 = exclusive_lock_owner_procedure
// (message), 40 = shared_lock_count (int32), 50 = waitingProcedures (repeated message).
// Unrecognized enum numbers and unknown fields are preserved in unknownFields.
private LockedResource(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType.valueOf(rawValue);
if (value == null) {
// Unknown enum number: keep it as an unknown varint field (proto2 rule).
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
resourceType_ = rawValue;
}
break;
}
case 18: {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
resourceName_ = bs;
break;
}
case 24: {
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(3, rawValue);
} else {
bitField0_ |= 0x00000004;
lockType_ = rawValue;
}
break;
}
case 34: {
// Repeated occurrences of a singular message field merge together.
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) != 0)) {
subBuilder = exclusiveLockOwnerProcedure_.toBuilder();
}
exclusiveLockOwnerProcedure_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(exclusiveLockOwnerProcedure_);
exclusiveLockOwnerProcedure_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
case 40: {
bitField0_ |= 0x00000010;
sharedLockCount_ = input.readInt32();
break;
}
case 50: {
if (!((mutable_bitField0_ & 0x00000020) != 0)) {
// Generic type argument restored (raw ArrayList in the extracted text).
waitingProcedures_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure>();
mutable_bitField0_ |= 0x00000020;
}
waitingProcedures_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry));
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown-field set even on error paths.
if (((mutable_bitField0_ & 0x00000020) != 0)) {
waitingProcedures_ = java.util.Collections.unmodifiableList(waitingProcedures_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockedResource_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockedResource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.Builder.class);
}
// Proto2 field-presence bits: 0x1 resource_type, 0x2 resource_name, 0x4 lock_type,
// 0x8 exclusive_lock_owner_procedure, 0x10 shared_lock_count.
private int bitField0_;
public static final int RESOURCE_TYPE_FIELD_NUMBER = 1;
// Stored as the raw enum wire number; converted to the enum type on access.
private int resourceType_;
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return Whether the resourceType field is set.
*/
@java.lang.Override public boolean hasResourceType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return The resourceType.
*/
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType getResourceType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType.valueOf(resourceType_);
// Unrecognized numbers fall back to SERVER, the enum's first value.
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType.SERVER : result;
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; decoded lazily (see getResourceName).
private volatile java.lang.Object resourceName_;
/**
* optional string resource_name = 2;
* @return Whether the resourceName field is set.
*/
@java.lang.Override
public boolean hasResourceName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string resource_name = 2;
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
resourceName_ = s;
}
return s;
}
}
/**
* optional string resource_name = 2;
* @return The bytes for resourceName.
*/
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int LOCK_TYPE_FIELD_NUMBER = 3;
// Stored as the raw enum wire number; converted to the enum type on access.
private int lockType_;
/**
* required .hbase.pb.LockType lock_type = 3;
* @return Whether the lockType field is set.
*/
@java.lang.Override public boolean hasLockType() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required .hbase.pb.LockType lock_type = 3;
* @return The lockType.
*/
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
// Unrecognized numbers fall back to EXCLUSIVE, the enum's first value.
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int EXCLUSIVE_LOCK_OWNER_PROCEDURE_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure exclusiveLockOwnerProcedure_;
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* @return Whether the exclusiveLockOwnerProcedure field is set.
*/
@java.lang.Override
public boolean hasExclusiveLockOwnerProcedure() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* @return The exclusiveLockOwnerProcedure.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getExclusiveLockOwnerProcedure() {
// Never returns null: falls back to the default instance when unset.
return exclusiveLockOwnerProcedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getExclusiveLockOwnerProcedureOrBuilder() {
return exclusiveLockOwnerProcedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
}
public static final int SHARED_LOCK_COUNT_FIELD_NUMBER = 5;
private int sharedLockCount_;
/**
* optional int32 shared_lock_count = 5;
* @return Whether the sharedLockCount field is set.
*/
@java.lang.Override
public boolean hasSharedLockCount() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional int32 shared_lock_count = 5;
* @return The sharedLockCount.
*/
@java.lang.Override
public int getSharedLockCount() {
return sharedLockCount_;
}
public static final int WAITINGPROCEDURES_FIELD_NUMBER = 6;
// Repeated message field; made unmodifiable at the end of parsing. Generic
// type arguments restored below (raw java.util.List in the extracted text).
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure> waitingProcedures_;
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure> getWaitingProceduresList() {
return waitingProcedures_;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
getWaitingProceduresOrBuilderList() {
return waitingProcedures_;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
@java.lang.Override
public int getWaitingProceduresCount() {
return waitingProcedures_.size();
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getWaitingProcedures(int index) {
return waitingProcedures_.get(index);
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getWaitingProceduresOrBuilder(
int index) {
return waitingProcedures_.get(index);
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
/**
* Checks proto2 required fields (resource_type, lock_type) and recursively
* verifies nested Procedure messages; the result is cached.
*/
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasResourceType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasExclusiveLockOwnerProcedure()) {
if (!getExclusiveLockOwnerProcedure().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getWaitingProceduresCount(); i++) {
if (!getWaitingProcedures(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
* Serializes set fields in field-number order (1..6), then any unknown fields.
*/
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, resourceType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeEnum(3, lockType_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getExclusiveLockOwnerProcedure());
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeInt32(5, sharedLockCount_);
}
for (int i = 0; i < waitingProcedures_.size(); i++) {
output.writeMessage(6, waitingProcedures_.get(i));
}
unknownFields.writeTo(output);
}
/**
* Computes the wire size of this message; the result is memoized because the
* message is immutable. Must mirror writeTo exactly.
*/
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, resourceType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(3, lockType_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, getExclusiveLockOwnerProcedure());
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt32Size(5, sharedLockCount_);
}
for (int i = 0; i < waitingProcedures_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(6, waitingProcedures_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource) obj;
if (hasResourceType() != other.hasResourceType()) return false;
if (hasResourceType()) {
if (resourceType_ != other.resourceType_) return false;
}
if (hasResourceName() != other.hasResourceName()) return false;
if (hasResourceName()) {
if (!getResourceName()
.equals(other.getResourceName())) return false;
}
if (hasLockType() != other.hasLockType()) return false;
if (hasLockType()) {
if (lockType_ != other.lockType_) return false;
}
if (hasExclusiveLockOwnerProcedure() != other.hasExclusiveLockOwnerProcedure()) return false;
if (hasExclusiveLockOwnerProcedure()) {
if (!getExclusiveLockOwnerProcedure()
.equals(other.getExclusiveLockOwnerProcedure())) return false;
}
if (hasSharedLockCount() != other.hasSharedLockCount()) return false;
if (hasSharedLockCount()) {
if (getSharedLockCount()
!= other.getSharedLockCount()) return false;
}
if (!getWaitingProceduresList()
.equals(other.getWaitingProceduresList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash code; 0 doubles as the "not yet computed" sentinel so the
// value is computed lazily. Only fields whose presence bit is set (and a
// non-empty repeated list) contribute, mixed via protoc's 37/53 scheme.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasResourceType()) {
hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + resourceType_;
}
if (hasResourceName()) {
hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getResourceName().hashCode();
}
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasExclusiveLockOwnerProcedure()) {
hash = (37 * hash) + EXCLUSIVE_LOCK_OWNER_PROCEDURE_FIELD_NUMBER;
hash = (53 * hash) + getExclusiveLockOwnerProcedure().hashCode();
}
if (hasSharedLockCount()) {
hash = (37 * hash) + SHARED_LOCK_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getSharedLockCount();
}
if (getWaitingProceduresCount() > 0) {
hash = (37 * hash) + WAITINGPROCEDURES_FIELD_NUMBER;
hash = (53 * hash) + getWaitingProceduresList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc parse entry points. All overloads delegate to the shared
// PARSER instance: in-memory inputs (ByteBuffer, ByteString, byte[]) throw
// InvalidProtocolBufferException on malformed data; stream-based overloads
// (InputStream, CodedInputStream, delimited) surface IOException via the
// GeneratedMessageV3 parseWithIOException helpers.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() avoids a redundant merge when this
// message is the shared default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockedResource}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockedResource)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceOrBuilder {
// Reflection plumbing: exposes the hbase.pb.LockedResource descriptor and
// the field-accessor table used by the GeneratedMessageV3 runtime.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockedResource_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockedResource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builders when the runtime requests it
// (alwaysUseFieldBuilders); otherwise they are created lazily on first use.
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getExclusiveLockOwnerProcedureFieldBuilder();
getWaitingProceduresFieldBuilder();
}
}
// Resets every field to its proto default (both enums here default to
// numeric value 1) and clears the matching presence bit in bitField0_.
@java.lang.Override
public Builder clear() {
super.clear();
resourceType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
resourceName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000004);
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedure_ = null;
} else {
exclusiveLockOwnerProcedureBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
sharedLockCount_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
if (waitingProceduresBuilder_ == null) {
waitingProcedures_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
} else {
waitingProceduresBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockedResource_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.getDefaultInstance();
}
// build() enforces required fields (resource_type, lock_type) via
// isInitialized(); buildPartial() below performs no such check.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new message without checking required
// fields. Presence bits are translated from the builder's bitField0_ into
// the message's, and the repeated waitingProcedures list is frozen
// (wrapped unmodifiable) and handed over rather than copied.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.resourceType_ = resourceType_;
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.resourceName_ = resourceName_;
if (((from_bitField0_ & 0x00000004) != 0)) {
to_bitField0_ |= 0x00000004;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000008) != 0)) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
result.exclusiveLockOwnerProcedure_ = exclusiveLockOwnerProcedure_;
} else {
result.exclusiveLockOwnerProcedure_ = exclusiveLockOwnerProcedureBuilder_.build();
}
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.sharedLockCount_ = sharedLockCount_;
to_bitField0_ |= 0x00000010;
}
if (waitingProceduresBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
waitingProcedures_ = java.util.Collections.unmodifiableList(waitingProcedures_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.waitingProcedures_ = waitingProcedures_;
} else {
result.waitingProcedures_ = waitingProceduresBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Boilerplate overrides that narrow the return type to this Builder while
// delegating entirely to the GeneratedMessageV3.Builder implementations.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible; otherwise falls back
// to the reflective descriptor-based merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge from another LockedResource: only fields present in
// `other` are copied. For the repeated waitingProcedures field the merge
// appends; when this builder's list is empty it aliases other's (immutable)
// list and drops the mutable bit instead of copying.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource.getDefaultInstance()) return this;
if (other.hasResourceType()) {
setResourceType(other.getResourceType());
}
if (other.hasResourceName()) {
bitField0_ |= 0x00000002;
resourceName_ = other.resourceName_;
onChanged();
}
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasExclusiveLockOwnerProcedure()) {
mergeExclusiveLockOwnerProcedure(other.getExclusiveLockOwnerProcedure());
}
if (other.hasSharedLockCount()) {
setSharedLockCount(other.getSharedLockCount());
}
if (waitingProceduresBuilder_ == null) {
if (!other.waitingProcedures_.isEmpty()) {
if (waitingProcedures_.isEmpty()) {
waitingProcedures_ = other.waitingProcedures_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureWaitingProceduresIsMutable();
waitingProcedures_.addAll(other.waitingProcedures_);
}
onChanged();
}
} else {
if (!other.waitingProcedures_.isEmpty()) {
if (waitingProceduresBuilder_.isEmpty()) {
// builder holds nothing: dispose it and alias other's list directly,
// re-creating the field builder only if the runtime forces it
waitingProceduresBuilder_.dispose();
waitingProceduresBuilder_ = null;
waitingProcedures_ = other.waitingProcedures_;
bitField0_ = (bitField0_ & ~0x00000020);
waitingProceduresBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getWaitingProceduresFieldBuilder() : null;
} else {
waitingProceduresBuilder_.addAllMessages(other.waitingProcedures_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// A LockedResource is initialized when both required fields (resource_type,
// lock_type) are set and every nested Procedure message is itself
// initialized.
@java.lang.Override
public final boolean isInitialized() {
if (!hasResourceType()) {
return false;
}
if (!hasLockType()) {
return false;
}
if (hasExclusiveLockOwnerProcedure()) {
if (!getExclusiveLockOwnerProcedure().isInitialized()) {
return false;
}
}
for (int i = 0; i < getWaitingProceduresCount(); i++) {
if (!getWaitingProcedures(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a message from the wire and merges it into this builder. On a
// parse failure the partially-parsed message (if any) is still merged in
// the finally block before the IOException is rethrown.
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bitmask for the builder's fields (bits 0x01..0x20, one per field
// in declaration order).
private int bitField0_;
// resource_type is stored as its raw enum number; 1 is the proto default.
private int resourceType_ = 1;
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return Whether the resourceType field is set.
*/
@java.lang.Override public boolean hasResourceType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return The resourceType, or SERVER if the stored number maps to no known value.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType getResourceType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType.valueOf(resourceType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType.SERVER : result;
}
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @param value The resourceType to set; must not be null.
* @return This builder for chaining.
*/
public Builder setResourceType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResourceType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
resourceType_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hbase.pb.LockedResourceType resource_type = 1;
* @return This builder for chaining.
*/
public Builder clearResourceType() {
bitField0_ = (bitField0_ & ~0x00000001);
resourceType_ = 1;
onChanged();
return this;
}
// resource_name is stored as either a String or a ByteString; the getter
// converts lazily and caches the String form when the bytes are valid UTF-8.
private java.lang.Object resourceName_ = "";
/**
* optional string resource_name = 2;
* @return Whether the resourceName field is set.
*/
public boolean hasResourceName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string resource_name = 2;
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
// cache the decoded form only when it round-trips losslessly
resourceName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string resource_name = 2;
* @return The bytes for resourceName; caches the UTF-8 encoding when the
* field currently holds a String.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* optional string resource_name = 2;
* @param value The resourceName to set; must not be null.
* @return This builder for chaining.
*/
public Builder setResourceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
resourceName_ = value;
onChanged();
return this;
}
/**
* optional string resource_name = 2;
* @return This builder for chaining.
*/
public Builder clearResourceName() {
bitField0_ = (bitField0_ & ~0x00000002);
resourceName_ = getDefaultInstance().getResourceName();
onChanged();
return this;
}
/**
* optional string resource_name = 2;
* @param value The bytes for resourceName to set; must not be null.
* @return This builder for chaining.
*/
public Builder setResourceNameBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
resourceName_ = value;
onChanged();
return this;
}
// lock_type is stored as its raw enum number; 1 (EXCLUSIVE) is the default.
private int lockType_ = 1;
/**
* required .hbase.pb.LockType lock_type = 3;
* @return Whether the lockType field is set.
*/
@java.lang.Override public boolean hasLockType() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required .hbase.pb.LockType lock_type = 3;
* @return The lockType, or EXCLUSIVE if the stored number maps to no known value.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
* required .hbase.pb.LockType lock_type = 3;
* @param value The lockType to set; must not be null.
* @return This builder for chaining.
*/
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hbase.pb.LockType lock_type = 3;
* @return This builder for chaining.
*/
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000004);
lockType_ = 1;
onChanged();
return this;
}
// exclusive_lock_owner_procedure uses the usual protoc dual representation:
// a plain message field until a nested builder is requested, after which
// exclusiveLockOwnerProcedureBuilder_ owns the value.
private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure exclusiveLockOwnerProcedure_;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> exclusiveLockOwnerProcedureBuilder_;
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* @return Whether the exclusiveLockOwnerProcedure field is set.
*/
public boolean hasExclusiveLockOwnerProcedure() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* @return The exclusiveLockOwnerProcedure, or the default Procedure instance when unset.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getExclusiveLockOwnerProcedure() {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
return exclusiveLockOwnerProcedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
} else {
return exclusiveLockOwnerProcedureBuilder_.getMessage();
}
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Sets the owner procedure; value must not be null. Marks the field present.
*/
public Builder setExclusiveLockOwnerProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
exclusiveLockOwnerProcedure_ = value;
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Builder-valued variant: the builder is built immediately.
*/
public Builder setExclusiveLockOwnerProcedure(
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedure_ = builderForValue.build();
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Merges into the existing value when one is already present (and is not
* the default instance); otherwise replaces it outright.
*/
public Builder mergeExclusiveLockOwnerProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0) &&
exclusiveLockOwnerProcedure_ != null &&
exclusiveLockOwnerProcedure_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()) {
exclusiveLockOwnerProcedure_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.newBuilder(exclusiveLockOwnerProcedure_).mergeFrom(value).buildPartial();
} else {
exclusiveLockOwnerProcedure_ = value;
}
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Clears the field and its presence bit.
*/
public Builder clearExclusiveLockOwnerProcedure() {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedure_ = null;
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Returns a mutable nested builder, marking the field present.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getExclusiveLockOwnerProcedureBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getExclusiveLockOwnerProcedureFieldBuilder().getBuilder();
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Read-only view that avoids forcing creation of the field builder.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getExclusiveLockOwnerProcedureOrBuilder() {
if (exclusiveLockOwnerProcedureBuilder_ != null) {
return exclusiveLockOwnerProcedureBuilder_.getMessageOrBuilder();
} else {
return exclusiveLockOwnerProcedure_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
}
}
/**
* optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;
* Lazily creates the SingleFieldBuilderV3; once created, the plain field is
* nulled out and the builder becomes the single source of truth.
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
getExclusiveLockOwnerProcedureFieldBuilder() {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedureBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
getExclusiveLockOwnerProcedure(),
getParentForChildren(),
isClean());
exclusiveLockOwnerProcedure_ = null;
}
return exclusiveLockOwnerProcedureBuilder_;
}
// shared_lock_count: plain int32 with explicit presence tracking (bit 0x10).
private int sharedLockCount_ ;
/**
* optional int32 shared_lock_count = 5;
* @return Whether the sharedLockCount field is set.
*/
@java.lang.Override
public boolean hasSharedLockCount() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional int32 shared_lock_count = 5;
* @return The sharedLockCount.
*/
@java.lang.Override
public int getSharedLockCount() {
return sharedLockCount_;
}
/**
* optional int32 shared_lock_count = 5;
* @param value The sharedLockCount to set.
* @return This builder for chaining.
*/
public Builder setSharedLockCount(int value) {
bitField0_ |= 0x00000010;
sharedLockCount_ = value;
onChanged();
return this;
}
/**
* optional int32 shared_lock_count = 5;
* @return This builder for chaining.
*/
public Builder clearSharedLockCount() {
bitField0_ = (bitField0_ & ~0x00000010);
sharedLockCount_ = 0;
onChanged();
return this;
}
// waitingProcedures: repeated field. Bit 0x20 means "waitingProcedures_ is
// a private mutable ArrayList"; when clear, the list may be shared/immutable
// and must be copied before mutation.
private java.util.List waitingProcedures_ =
java.util.Collections.emptyList();
private void ensureWaitingProceduresIsMutable() {
if (!((bitField0_ & 0x00000020) != 0)) {
waitingProcedures_ = new java.util.ArrayList(waitingProcedures_);
bitField0_ |= 0x00000020;
}
}
// Once waitingProceduresBuilder_ is non-null it owns the repeated field and
// every accessor below delegates to it.
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> waitingProceduresBuilder_;
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* @return An unmodifiable view of the current list.
*/
public java.util.List getWaitingProceduresList() {
if (waitingProceduresBuilder_ == null) {
return java.util.Collections.unmodifiableList(waitingProcedures_);
} else {
return waitingProceduresBuilder_.getMessageList();
}
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* @return The number of waiting procedures.
*/
public int getWaitingProceduresCount() {
if (waitingProceduresBuilder_ == null) {
return waitingProcedures_.size();
} else {
return waitingProceduresBuilder_.getCount();
}
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* @return The element at {@code index}.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getWaitingProcedures(int index) {
if (waitingProceduresBuilder_ == null) {
return waitingProcedures_.get(index);
} else {
return waitingProceduresBuilder_.getMessage(index);
}
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Replaces the element at {@code index}; value must not be null.
*/
public Builder setWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (waitingProceduresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWaitingProceduresIsMutable();
waitingProcedures_.set(index, value);
onChanged();
} else {
waitingProceduresBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Builder-valued variant: the builder is built immediately.
*/
public Builder setWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.set(index, builderForValue.build());
onChanged();
} else {
waitingProceduresBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Appends a procedure; value must not be null.
*/
public Builder addWaitingProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (waitingProceduresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(value);
onChanged();
} else {
waitingProceduresBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Inserts a procedure at {@code index}; value must not be null.
*/
public Builder addWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (waitingProceduresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(index, value);
onChanged();
} else {
waitingProceduresBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Appends from a builder; the builder is built immediately.
*/
public Builder addWaitingProcedures(
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(builderForValue.build());
onChanged();
} else {
waitingProceduresBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Inserts from a builder at {@code index}; the builder is built immediately.
*/
public Builder addWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(index, builderForValue.build());
onChanged();
} else {
waitingProceduresBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Appends every element of {@code values}.
*/
public Builder addAllWaitingProcedures(
java.lang.Iterable values) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, waitingProcedures_);
onChanged();
} else {
waitingProceduresBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Removes all elements and drops the mutable-list bit.
*/
public Builder clearWaitingProcedures() {
if (waitingProceduresBuilder_ == null) {
waitingProcedures_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
waitingProceduresBuilder_.clear();
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
* Removes the element at {@code index}.
*/
public Builder removeWaitingProcedures(int index) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.remove(index);
onChanged();
} else {
waitingProceduresBuilder_.remove(index);
}
return this;
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getWaitingProceduresBuilder(
int index) {
return getWaitingProceduresFieldBuilder().getBuilder(index);
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getWaitingProceduresOrBuilder(
int index) {
if (waitingProceduresBuilder_ == null) {
return waitingProcedures_.get(index); } else {
return waitingProceduresBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hbase.pb.Procedure waitingProcedures = 6;
*/
public java.util.List
getWaitingProceduresOrBuilderList() {
if (waitingProceduresBuilder_ != null) {
return waitingProceduresBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(waitingProcedures_);
}
}
/**
 * repeated .hbase.pb.Procedure waitingProcedures = 6;
 *
 * Appends a new element (initialized to the default instance) and returns
 * its builder for in-place population.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addWaitingProceduresBuilder() {
  return getWaitingProceduresFieldBuilder().addBuilder(
      org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
}
/**
 * repeated .hbase.pb.Procedure waitingProcedures = 6;
 *
 * Inserts a new default-initialized element at {@code index} and returns
 * its builder.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addWaitingProceduresBuilder(
    int index) {
  return getWaitingProceduresFieldBuilder().addBuilder(
      index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
}
/**
 * repeated .hbase.pb.Procedure waitingProcedures = 6;
 *
 * Returns builders for every element. Restored the generic return type
 * (stripped to a raw {@code java.util.List} during extraction); standard
 * protobuf codegen returns {@code List<Procedure.Builder>}.
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder>
    getWaitingProceduresBuilderList() {
  return getWaitingProceduresFieldBuilder().getBuilderList();
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
getWaitingProceduresFieldBuilder() {
if (waitingProceduresBuilder_ == null) {
waitingProceduresBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
waitingProcedures_,
((bitField0_ & 0x00000020) != 0),
getParentForChildren(),
isClean());
waitingProcedures_ = null;
}
return waitingProceduresBuilder_;
}
// Replaces the unknown-field set wholesale; delegates to the superclass.
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Merges the given unknown fields into the existing set; delegates to the superclass.
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockedResource)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockedResource)
// Singleton default instance for LockedResource; created eagerly at class load.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource();
}
// Returns the shared immutable default instance of LockedResource.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Deprecated direct PARSER reference (use {@link #parser()}); kept for
 * backward compatibility. Restored the {@code Parser<LockedResource>} /
 * {@code AbstractParser<LockedResource>} type arguments that were stripped
 * to raw types during extraction — this is the form protoc emits.
 */
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockedResource>
    PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<LockedResource>() {
  @java.lang.Override
  public LockedResource parsePartialFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    // Partial parse: required-field validation is the caller's responsibility.
    return new LockedResource(input, extensionRegistry);
  }
};

/** Returns the parser for LockedResource messages. */
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockedResource> parser() {
  return PARSER;
}
/**
 * Per-instance parser accessor. Restored the {@code Parser<LockedResource>}
 * type argument stripped to a raw type during extraction.
 */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<LockedResource> getParserForType() {
  return PARSER;
}
// MessageOrBuilder contract: default instance of this message type.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockedResource getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Protobuf service {@code hbase.pb.LockService}
 *
 * Generated service stub for the master lock service (RequestLock /
 * LockHeartbeat RPCs). Restored the {@code RpcCallback<...>} type arguments
 * that were stripped to raw types during extraction; all logic is untouched
 * generator output.
 */
public static abstract class LockService
    implements org.apache.hbase.thirdparty.com.google.protobuf.Service {
  protected LockService() {}
  /** Async server-side interface; implement and wrap via {@link #newReflectiveService}. */
  public interface Interface {
    /**
     * Acquire lock on namespace/table/region
     *
     * rpc RequestLock(.hbase.pb.LockRequest) returns (.hbase.pb.LockResponse);
     */
    public abstract void requestLock(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
        org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done);
    /**
     * Keep alive (or not) a previously acquired lock
     *
     * rpc LockHeartbeat(.hbase.pb.LockHeartbeatRequest) returns (.hbase.pb.LockHeartbeatResponse);
     */
    public abstract void lockHeartbeat(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
        org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done);
  }
  /** Adapts an {@link Interface} implementation to the reflective Service API. */
  public static org.apache.hbase.thirdparty.com.google.protobuf.Service newReflectiveService(
      final Interface impl) {
    return new LockService() {
      @java.lang.Override
      public void requestLock(
          org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
          org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done) {
        impl.requestLock(controller, request, done);
      }
      @java.lang.Override
      public void lockHeartbeat(
          org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
          org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done) {
        impl.lockHeartbeat(controller, request, done);
      }
    };
  }
  /** Adapts a {@link BlockingInterface} implementation to the reflective BlockingService API. */
  public static org.apache.hbase.thirdparty.com.google.protobuf.BlockingService
      newReflectiveBlockingService(final BlockingInterface impl) {
    return new org.apache.hbase.thirdparty.com.google.protobuf.BlockingService() {
      public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.ServiceDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public final org.apache.hbase.thirdparty.com.google.protobuf.Message callBlockingMethod(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor method,
          org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
          org.apache.hbase.thirdparty.com.google.protobuf.Message request)
          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.callBlockingMethod() given method descriptor for " +
            "wrong service type.");
        }
        // Dispatch by descriptor index: 0 = RequestLock, 1 = LockHeartbeat.
        switch(method.getIndex()) {
          case 0:
            return impl.requestLock(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)request);
          case 1:
            return impl.lockHeartbeat(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)request);
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }
      public final org.apache.hbase.thirdparty.com.google.protobuf.Message
          getRequestPrototype(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getRequestPrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
          case 1:
            return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }
      public final org.apache.hbase.thirdparty.com.google.protobuf.Message
          getResponsePrototype(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getResponsePrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
          case 1:
            return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }
    };
  }
  /**
   * Acquire lock on namespace/table/region
   *
   * rpc RequestLock(.hbase.pb.LockRequest) returns (.hbase.pb.LockResponse);
   */
  public abstract void requestLock(
      org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
      org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
      org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done);
  /**
   * Keep alive (or not) a previously acquired lock
   *
   * rpc LockHeartbeat(.hbase.pb.LockHeartbeatRequest) returns (.hbase.pb.LockHeartbeatResponse);
   */
  public abstract void lockHeartbeat(
      org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
      org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
      org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done);
  /** Descriptor of this service (index 0 in the file's service list). */
  public static final
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.ServiceDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getServices().get(0);
  }
  public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.ServiceDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public final void callMethod(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor method,
      org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
      org.apache.hbase.thirdparty.com.google.protobuf.Message request,
      org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<
        org.apache.hbase.thirdparty.com.google.protobuf.Message> done) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.callMethod() given method descriptor for wrong " +
        "service type.");
    }
    switch(method.getIndex()) {
      case 0:
        this.requestLock(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)request,
          org.apache.hbase.thirdparty.com.google.protobuf.RpcUtil.specializeCallback(
            done));
        return;
      case 1:
        this.lockHeartbeat(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)request,
          org.apache.hbase.thirdparty.com.google.protobuf.RpcUtil.specializeCallback(
            done));
        return;
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }
  public final org.apache.hbase.thirdparty.com.google.protobuf.Message
      getRequestPrototype(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor method) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
    }
    switch(method.getIndex()) {
      case 0:
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
      case 1:
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance();
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }
  public final org.apache.hbase.thirdparty.com.google.protobuf.Message
      getResponsePrototype(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor method) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.getResponsePrototype() given method " +
        "descriptor for wrong service type.");
    }
    switch(method.getIndex()) {
      case 0:
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
      case 1:
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance();
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }
  /** Creates an async client stub over the given channel. */
  public static Stub newStub(
      org.apache.hbase.thirdparty.com.google.protobuf.RpcChannel channel) {
    return new Stub(channel);
  }
  /** Async client stub; each call delegates to {@code channel.callMethod}. */
  @javax.annotation.Generated("proto") public static final class Stub extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService implements Interface {
    private Stub(org.apache.hbase.thirdparty.com.google.protobuf.RpcChannel channel) {
      this.channel = channel;
    }
    private final org.apache.hbase.thirdparty.com.google.protobuf.RpcChannel channel;
    public org.apache.hbase.thirdparty.com.google.protobuf.RpcChannel getChannel() {
      return channel;
    }
    public void requestLock(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
        org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done) {
      channel.callMethod(
        getDescriptor().getMethods().get(0),
        controller,
        request,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance(),
        org.apache.hbase.thirdparty.com.google.protobuf.RpcUtil.generalizeCallback(
          done,
          org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class,
          org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance()));
    }
    public void lockHeartbeat(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
        org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done) {
      channel.callMethod(
        getDescriptor().getMethods().get(1),
        controller,
        request,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance(),
        org.apache.hbase.thirdparty.com.google.protobuf.RpcUtil.generalizeCallback(
          done,
          org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.class,
          org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance()));
    }
  }
  /** Creates a blocking client stub over the given channel. */
  public static BlockingInterface newBlockingStub(
      org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel channel) {
    return new BlockingStub(channel);
  }
  /** Blocking client-side interface. */
  public interface BlockingInterface {
    public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse requestLock(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request)
        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
    public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse lockHeartbeat(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request)
        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
  }
  private static final class BlockingStub implements BlockingInterface {
    private BlockingStub(org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel channel) {
      this.channel = channel;
    }
    private final org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel channel;
    public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse requestLock(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request)
        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException {
      return (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) channel.callBlockingMethod(
        getDescriptor().getMethods().get(0),
        controller,
        request,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance());
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse lockHeartbeat(
        org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request)
        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException {
      return (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) channel.callBlockingMethod(
        getDescriptor().getMethods().get(1),
        controller,
        request,
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance());
    }
  }
  // @@protoc_insertion_point(class_scope:hbase.pb.LockService)
}
// Per-message descriptor and reflective field-accessor tables, populated by
// the static initializer below. One descriptor/table pair per message type
// declared in LockService.proto.
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_LockRequest_descriptor;
private static final
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_hbase_pb_LockRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_LockResponse_descriptor;
private static final
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_hbase_pb_LockResponse_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
private static final
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
private static final
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_LockProcedureData_descriptor;
private static final
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_hbase_pb_LockProcedureData_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_LockedResource_descriptor;
private static final
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_hbase_pb_LockedResource_fieldAccessorTable;
// File-level descriptor accessor for LockService.proto.
public static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
// Assigned exactly once by the static initializer below.
private static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
    descriptor;
static {
  // Serialized FileDescriptorProto for LockService.proto, embedded as an
  // escaped string. This is runtime data — it must not be edited by hand.
  java.lang.String[] descriptorData = {
    "\n\021LockService.proto\022\010hbase.pb\032\013HBase.pro" +
    "to\032\017Procedure.proto\"\332\001\n\013LockRequest\022%\n\tl" +
    "ock_type\030\001 \002(\0162\022.hbase.pb.LockType\022\021\n\tna" +
    "mespace\030\002 \001(\t\022\'\n\ntable_name\030\003 \001(\0132\023.hbas" +
    "e.pb.TableName\022)\n\013region_info\030\004 \003(\0132\024.hb" +
    "ase.pb.RegionInfo\022\023\n\013description\030\005 \001(\t\022\026" +
    "\n\013nonce_group\030\006 \001(\004:\0010\022\020\n\005nonce\030\007 \001(\004:\0010" +
    "\"\037\n\014LockResponse\022\017\n\007proc_id\030\001 \002(\004\"A\n\024Loc" +
    "kHeartbeatRequest\022\017\n\007proc_id\030\001 \002(\004\022\030\n\nke" +
    "ep_alive\030\002 \001(\010:\004true\"\224\001\n\025LockHeartbeatRe" +
    "sponse\022?\n\013lock_status\030\001 \002(\0162*.hbase.pb.L" +
    "ockHeartbeatResponse.LockStatus\022\022\n\ntimeo" +
    "ut_ms\030\002 \001(\r\"&\n\nLockStatus\022\014\n\010UNLOCKED\020\001\022" +
    "\n\n\006LOCKED\020\002\"\325\001\n\021LockProcedureData\022%\n\tloc" +
    "k_type\030\001 \002(\0162\022.hbase.pb.LockType\022\021\n\tname" +
    "space\030\002 \001(\t\022\'\n\ntable_name\030\003 \001(\0132\023.hbase." +
    "pb.TableName\022)\n\013region_info\030\004 \003(\0132\024.hbas" +
    "e.pb.RegionInfo\022\023\n\013description\030\005 \001(\t\022\035\n\016" +
    "is_master_lock\030\006 \001(\010:\005false\"\213\002\n\016LockedRe" +
    "source\0223\n\rresource_type\030\001 \002(\0162\034.hbase.pb" +
    ".LockedResourceType\022\025\n\rresource_name\030\002 \001" +
    "(\t\022%\n\tlock_type\030\003 \002(\0162\022.hbase.pb.LockTyp" +
    "e\022;\n\036exclusive_lock_owner_procedure\030\004 \001(" +
    "\0132\023.hbase.pb.Procedure\022\031\n\021shared_lock_co" +
    "unt\030\005 \001(\005\022.\n\021waitingProcedures\030\006 \003(\0132\023.h" +
    "base.pb.Procedure*%\n\010LockType\022\r\n\tEXCLUSI" +
    "VE\020\001\022\n\n\006SHARED\020\002*P\n\022LockedResourceType\022\n" +
    "\n\006SERVER\020\001\022\r\n\tNAMESPACE\020\002\022\t\n\005TABLE\020\003\022\n\n\006" +
    "REGION\020\004\022\010\n\004PEER\020\0052\235\001\n\013LockService\022<\n\013Re" +
    "questLock\022\025.hbase.pb.LockRequest\032\026.hbase" +
    ".pb.LockResponse\022P\n\rLockHeartbeat\022\036.hbas" +
    "e.pb.LockHeartbeatRequest\032\037.hbase.pb.Loc" +
    "kHeartbeatResponseBN\n1org.apache.hadoop." +
    "hbase.shaded.protobuf.generatedB\021LockSer" +
    "viceProtosH\001\210\001\001\240\001\001"
  };
  // Build the file descriptor, resolving imports of HBase.proto and
  // Procedure.proto against their generated outer classes.
  descriptor = org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor[] {
        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(),
        org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor(),
      });
  // Wire up each message's descriptor and reflective field-accessor table.
  // Message-type indexes follow declaration order in the .proto file.
  internal_static_hbase_pb_LockRequest_descriptor =
    getDescriptor().getMessageTypes().get(0);
  internal_static_hbase_pb_LockRequest_fieldAccessorTable = new
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_hbase_pb_LockRequest_descriptor,
      new java.lang.String[] { "LockType", "Namespace", "TableName", "RegionInfo", "Description", "NonceGroup", "Nonce", });
  internal_static_hbase_pb_LockResponse_descriptor =
    getDescriptor().getMessageTypes().get(1);
  internal_static_hbase_pb_LockResponse_fieldAccessorTable = new
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_hbase_pb_LockResponse_descriptor,
      new java.lang.String[] { "ProcId", });
  internal_static_hbase_pb_LockHeartbeatRequest_descriptor =
    getDescriptor().getMessageTypes().get(2);
  internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable = new
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_hbase_pb_LockHeartbeatRequest_descriptor,
      new java.lang.String[] { "ProcId", "KeepAlive", });
  internal_static_hbase_pb_LockHeartbeatResponse_descriptor =
    getDescriptor().getMessageTypes().get(3);
  internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable = new
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_hbase_pb_LockHeartbeatResponse_descriptor,
      new java.lang.String[] { "LockStatus", "TimeoutMs", });
  internal_static_hbase_pb_LockProcedureData_descriptor =
    getDescriptor().getMessageTypes().get(4);
  internal_static_hbase_pb_LockProcedureData_fieldAccessorTable = new
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_hbase_pb_LockProcedureData_descriptor,
      new java.lang.String[] { "LockType", "Namespace", "TableName", "RegionInfo", "Description", "IsMasterLock", });
  internal_static_hbase_pb_LockedResource_descriptor =
    getDescriptor().getMessageTypes().get(5);
  internal_static_hbase_pb_LockedResource_fieldAccessorTable = new
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_hbase_pb_LockedResource_descriptor,
      new java.lang.String[] { "ResourceType", "ResourceName", "LockType", "ExclusiveLockOwnerProcedure", "SharedLockCount", "WaitingProcedures", });
  // Force dependency descriptors to initialize.
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor();
  org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}