Please wait — this can take a few minutes ...
Many resources are needed to download a project. Please understand that we have to cover our server costs. Thank you in advance.
The project price is only $1.
You can buy this project and download/modify it as often as you want.
org.apache.hudi.org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos Maven / Gradle / Ivy
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: BucketCacheEntry.proto
package org.apache.hadoop.hbase.shaded.protobuf.generated;
@javax.annotation.Generated("proto") public final class BucketCacheProtos {
private BucketCacheProtos() {}
public static void registerAllExtensions(
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite) registry);
}
/**
 * Protobuf enum {@code hbase.pb.BlockType}
 *
 * <p>Enumerates the HFile block categories a bucket-cached block may belong to.
 * Each constant's numeric value is the wire value declared in
 * BucketCacheEntry.proto and must never be renumbered.
 */
public enum BlockType
    implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
  /** <code>data = 0;</code> */
  data(0),
  /** <code>encoded_data = 1;</code> */
  encoded_data(1),
  /** <code>leaf_index = 2;</code> */
  leaf_index(2),
  /** <code>bloom_chunk = 3;</code> */
  bloom_chunk(3),
  /** <code>meta = 4;</code> */
  meta(4),
  /** <code>intermediate_index = 5;</code> */
  intermediate_index(5),
  /** <code>root_index = 6;</code> */
  root_index(6),
  /** <code>file_info = 7;</code> */
  file_info(7),
  /** <code>general_bloom_meta = 8;</code> */
  general_bloom_meta(8),
  /** <code>delete_family_bloom_meta = 9;</code> */
  delete_family_bloom_meta(9),
  /** <code>trailer = 10;</code> */
  trailer(10),
  /** <code>index_v1 = 11;</code> */
  index_v1(11),
  ;

  /** <code>data = 0;</code> */
  public static final int data_VALUE = 0;
  /** <code>encoded_data = 1;</code> */
  public static final int encoded_data_VALUE = 1;
  /** <code>leaf_index = 2;</code> */
  public static final int leaf_index_VALUE = 2;
  /** <code>bloom_chunk = 3;</code> */
  public static final int bloom_chunk_VALUE = 3;
  /** <code>meta = 4;</code> */
  public static final int meta_VALUE = 4;
  /** <code>intermediate_index = 5;</code> */
  public static final int intermediate_index_VALUE = 5;
  /** <code>root_index = 6;</code> */
  public static final int root_index_VALUE = 6;
  /** <code>file_info = 7;</code> */
  public static final int file_info_VALUE = 7;
  /** <code>general_bloom_meta = 8;</code> */
  public static final int general_bloom_meta_VALUE = 8;
  /** <code>delete_family_bloom_meta = 9;</code> */
  public static final int delete_family_bloom_meta_VALUE = 9;
  /** <code>trailer = 10;</code> */
  public static final int trailer_VALUE = 10;
  /** <code>index_v1 = 11;</code> */
  public static final int index_v1_VALUE = 11;

  /** @return the numeric wire value of this enum entry. */
  public final int getNumber() {
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static BlockType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or
   *     {@code null} if the value is unknown.
   */
  public static BlockType forNumber(int value) {
    switch (value) {
      case 0: return data;
      case 1: return encoded_data;
      case 2: return leaf_index;
      case 3: return bloom_chunk;
      case 4: return meta;
      case 5: return intermediate_index;
      case 6: return root_index;
      case 7: return file_info;
      case 8: return general_bloom_meta;
      case 9: return delete_family_bloom_meta;
      case 10: return trailer;
      case 11: return index_v1;
      default: return null;
    }
  }

  /** @return the number-to-enum lookup used by the protobuf runtime. */
  public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<BlockType>
      internalGetValueMap() {
    return internalValueMap;
  }

  // Typed lite map; the decompiled listing had dropped the <BlockType> argument.
  private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
      BlockType> internalValueMap =
          new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<BlockType>() {
            public BlockType findValueByNumber(int number) {
              return BlockType.forNumber(number);
            }
          };

  /** @return the descriptor for this specific enum value (by declaration order). */
  public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }

  /** @return the descriptor of the enum type itself. */
  public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }

  // BlockType is the first (index 0) enum declared in the .proto file.
  public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.getDescriptor().getEnumTypes().get(0);
  }

  private static final BlockType[] VALUES = values();

  /**
   * @param desc an enum value descriptor belonging to this enum type.
   * @return the enum constant corresponding to the descriptor.
   * @throws java.lang.IllegalArgumentException if the descriptor is for another type.
   */
  public static BlockType valueOf(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  // Wire value of this constant.
  private final int value;

  private BlockType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:hbase.pb.BlockType)
}
/**
 * Protobuf enum {@code hbase.pb.BlockPriority}
 *
 * <p>Priority bucket of a cached block. Wire values match the numbers declared
 * in BucketCacheEntry.proto and must never be renumbered.
 */
public enum BlockPriority
    implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
  /** <code>single = 0;</code> */
  single(0),
  /** <code>multi = 1;</code> */
  multi(1),
  /** <code>memory = 2;</code> */
  memory(2),
  ;

  /** <code>single = 0;</code> */
  public static final int single_VALUE = 0;
  /** <code>multi = 1;</code> */
  public static final int multi_VALUE = 1;
  /** <code>memory = 2;</code> */
  public static final int memory_VALUE = 2;

  /** @return the numeric wire value of this enum entry. */
  public final int getNumber() {
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static BlockPriority valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or
   *     {@code null} if the value is unknown.
   */
  public static BlockPriority forNumber(int value) {
    switch (value) {
      case 0: return single;
      case 1: return multi;
      case 2: return memory;
      default: return null;
    }
  }

  /** @return the number-to-enum lookup used by the protobuf runtime. */
  public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<BlockPriority>
      internalGetValueMap() {
    return internalValueMap;
  }

  // Typed lite map; the decompiled listing had dropped the <BlockPriority> argument.
  private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
      BlockPriority> internalValueMap =
          new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<BlockPriority>() {
            public BlockPriority findValueByNumber(int number) {
              return BlockPriority.forNumber(number);
            }
          };

  /** @return the descriptor for this specific enum value (by declaration order). */
  public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }

  /** @return the descriptor of the enum type itself. */
  public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }

  // BlockPriority is the second (index 1) enum declared in the .proto file.
  public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.getDescriptor().getEnumTypes().get(1);
  }

  private static final BlockPriority[] VALUES = values();

  /**
   * @param desc an enum value descriptor belonging to this enum type.
   * @return the enum constant corresponding to the descriptor.
   * @throws java.lang.IllegalArgumentException if the descriptor is for another type.
   */
  public static BlockPriority valueOf(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  // Wire value of this constant.
  private final int value;

  private BlockPriority(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:hbase.pb.BlockPriority)
}
/**
 * Read accessors for {@code hbase.pb.BucketCacheEntry}, shared by the message
 * class and its builder.
 */
public interface BucketCacheEntryOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.BucketCacheEntry)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  /**
   * <code>required int64 cache_capacity = 1;</code>
   * @return Whether the cacheCapacity field is set.
   */
  boolean hasCacheCapacity();
  /**
   * <code>required int64 cache_capacity = 1;</code>
   * @return The cacheCapacity.
   */
  long getCacheCapacity();

  /**
   * <code>required string io_class = 2;</code>
   * @return Whether the ioClass field is set.
   */
  boolean hasIoClass();
  /**
   * <code>required string io_class = 2;</code>
   * @return The ioClass.
   */
  java.lang.String getIoClass();
  /**
   * <code>required string io_class = 2;</code>
   * @return The bytes for ioClass.
   */
  org.apache.hbase.thirdparty.com.google.protobuf.ByteString
      getIoClassBytes();

  /**
   * <code>required string map_class = 3;</code>
   * @return Whether the mapClass field is set.
   */
  boolean hasMapClass();
  /**
   * <code>required string map_class = 3;</code>
   * @return The mapClass.
   */
  java.lang.String getMapClass();
  /**
   * <code>required string map_class = 3;</code>
   * @return The bytes for mapClass.
   */
  org.apache.hbase.thirdparty.com.google.protobuf.ByteString
      getMapClassBytes();

  /**
   * <code>map&lt;int32, string&gt; deserializers = 4;</code>
   */
  int getDeserializersCount();
  /**
   * <code>map&lt;int32, string&gt; deserializers = 4;</code>
   */
  boolean containsDeserializers(
      int key);
  /**
   * Use {@link #getDeserializersMap()} instead.
   */
  @java.lang.Deprecated
  java.util.Map<java.lang.Integer, java.lang.String>
      getDeserializers();
  /**
   * <code>map&lt;int32, string&gt; deserializers = 4;</code>
   */
  java.util.Map<java.lang.Integer, java.lang.String>
      getDeserializersMap();
  /**
   * <code>map&lt;int32, string&gt; deserializers = 4;</code>
   */
  java.lang.String getDeserializersOrDefault(
      int key,
      java.lang.String defaultValue);
  /**
   * <code>map&lt;int32, string&gt; deserializers = 4;</code>
   */
  java.lang.String getDeserializersOrThrow(
      int key);

  /**
   * <code>required .hbase.pb.BackingMap backing_map = 5;</code>
   * @return Whether the backingMap field is set.
   */
  boolean hasBackingMap();
  /**
   * <code>required .hbase.pb.BackingMap backing_map = 5;</code>
   * @return The backingMap.
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap getBackingMap();
  /**
   * <code>required .hbase.pb.BackingMap backing_map = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder getBackingMapOrBuilder();

  /**
   * <code>optional bytes checksum = 6;</code>
   * @return Whether the checksum field is set.
   */
  boolean hasChecksum();
  /**
   * <code>optional bytes checksum = 6;</code>
   * @return The checksum.
   */
  org.apache.hbase.thirdparty.com.google.protobuf.ByteString getChecksum();
}
/**
* Protobuf type {@code hbase.pb.BucketCacheEntry}
*/
@javax.annotation.Generated("proto") public static final class BucketCacheEntry extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.BucketCacheEntry)
BucketCacheEntryOrBuilder {
private static final long serialVersionUID = 0L;
// Use BucketCacheEntry.newBuilder() to construct.
private BucketCacheEntry(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// No-arg constructor: initializes string/bytes fields to their proto2 defaults
// (empty string / empty ByteString).
private BucketCacheEntry() {
ioClass_ = "";
mapClass_ = "";
checksum_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY;
}
// Invoked reflectively by the protobuf runtime to create new instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BucketCacheEntry();
}
// Fields present on the wire but not declared in this schema version.
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs from {@code input}
 * until end-of-stream (tag 0). Each case label is the encoded tag
 * (field_number << 3 | wire_type). Unrecognized fields are preserved in
 * {@code unknownFields}.
 */
private BucketCacheEntry(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of the message.
done = true;
break;
case 8: {
// Field 1 (cache_capacity), varint.
bitField0_ |= 0x00000001;
cacheCapacity_ = input.readInt64();
break;
}
case 18: {
// Field 2 (io_class), length-delimited; kept as ByteString until first String access.
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
ioClass_ = bs;
break;
}
case 26: {
// Field 3 (map_class), length-delimited.
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000004;
mapClass_ = bs;
break;
}
case 34: {
// Field 4 (deserializers map entry); lazily allocate the mutable map on first entry.
if (!((mutable_bitField0_ & 0x00000008) != 0)) {
deserializers_ = org.apache.hbase.thirdparty.com.google.protobuf.MapField.newMapField(
DeserializersDefaultEntryHolder.defaultEntry);
mutable_bitField0_ |= 0x00000008;
}
org.apache.hbase.thirdparty.com.google.protobuf.MapEntry
deserializers__ = input.readMessage(
DeserializersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
deserializers_.getMutableMap().put(
deserializers__.getKey(), deserializers__.getValue());
break;
}
case 42: {
// Field 5 (backing_map), sub-message; merge with any previously-read value.
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) != 0)) {
subBuilder = backingMap_.toBuilder();
}
backingMap_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(backingMap_);
backingMap_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
case 50: {
// Field 6 (checksum), length-delimited bytes.
bitField0_ |= 0x00000010;
checksum_ = input.readBytes();
break;
}
default: {
// Unknown field: either preserved or, if it signals end-of-group, stop.
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always freeze unknown fields and extensions, even on failure.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor for the BucketCacheEntry message type.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketCacheEntry_descriptor;
}
// Runtime hook: resolves a map field by its field number (only field 4 is a map).
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.MapField internalGetMapField(
int number) {
switch (number) {
case 4:
return internalGetDeserializers();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
// Runtime hook: binds generated field accessors to this class and its Builder.
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketCacheEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.Builder.class);
}
// Presence bits for the proto2 optional/required fields (bit 0x1 = cache_capacity,
// 0x2 = io_class, 0x4 = map_class, 0x8 = backing_map, 0x10 = checksum).
private int bitField0_;
public static final int CACHE_CAPACITY_FIELD_NUMBER = 1;
private long cacheCapacity_;
/**
 * required int64 cache_capacity = 1;
 * @return Whether the cacheCapacity field is set.
 */
@java.lang.Override
public boolean hasCacheCapacity() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required int64 cache_capacity = 1;
 * @return The cacheCapacity (0 if unset).
 */
@java.lang.Override
public long getCacheCapacity() {
return cacheCapacity_;
}
public static final int IO_CLASS_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted lazily and cached on first
// access in each direction (standard protobuf string-field representation).
private volatile java.lang.Object ioClass_;
/**
 * required string io_class = 2;
 * @return Whether the ioClass field is set.
 */
@java.lang.Override
public boolean hasIoClass() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * required string io_class = 2;
 * @return The ioClass.
 */
@java.lang.Override
public java.lang.String getIoClass() {
java.lang.Object ref = ioClass_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
ioClass_ = s;
}
return s;
}
}
/**
 * required string io_class = 2;
 * @return The bytes for ioClass.
 */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getIoClassBytes() {
java.lang.Object ref = ioClass_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
ioClass_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int MAP_CLASS_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; converted lazily and cached on first
// access in each direction (standard protobuf string-field representation).
private volatile java.lang.Object mapClass_;
/**
 * required string map_class = 3;
 * @return Whether the mapClass field is set.
 */
@java.lang.Override
public boolean hasMapClass() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * required string map_class = 3;
 * @return The mapClass.
 */
@java.lang.Override
public java.lang.String getMapClass() {
java.lang.Object ref = mapClass_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
mapClass_ = s;
}
return s;
}
}
/**
 * required string map_class = 3;
 * @return The bytes for mapClass.
 */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getMapClassBytes() {
java.lang.Object ref = mapClass_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
mapClass_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int DESERIALIZERS_FIELD_NUMBER = 4;

/** Holds the prototype map entry used to parse and serialize field 4. */
private static final class DeserializersDefaultEntryHolder {
  static final org.apache.hbase.thirdparty.com.google.protobuf.MapEntry<
      java.lang.Integer, java.lang.String> defaultEntry =
          org.apache.hbase.thirdparty.com.google.protobuf.MapEntry
              .newDefaultInstance(
                  org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketCacheEntry_DeserializersEntry_descriptor,
                  org.apache.hbase.thirdparty.com.google.protobuf.WireFormat.FieldType.INT32,
                  0,
                  org.apache.hbase.thirdparty.com.google.protobuf.WireFormat.FieldType.STRING,
                  "");
}

private org.apache.hbase.thirdparty.com.google.protobuf.MapField<
    java.lang.Integer, java.lang.String> deserializers_;

// Returns the backing map field, substituting a shared immutable empty map
// when the field was never populated. (Type arguments restored; the decompiled
// listing had raw MapField/Map returns here.)
private org.apache.hbase.thirdparty.com.google.protobuf.MapField<java.lang.Integer, java.lang.String>
    internalGetDeserializers() {
  if (deserializers_ == null) {
    return org.apache.hbase.thirdparty.com.google.protobuf.MapField.emptyMapField(
        DeserializersDefaultEntryHolder.defaultEntry);
  }
  return deserializers_;
}

/** Number of entries in the deserializers map. */
public int getDeserializersCount() {
  return internalGetDeserializers().getMap().size();
}

/**
 * <code>map&lt;int32, string&gt; deserializers = 4;</code>
 */
@java.lang.Override
public boolean containsDeserializers(
    int key) {
  return internalGetDeserializers().getMap().containsKey(key);
}

/**
 * Use {@link #getDeserializersMap()} instead.
 */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.Integer, java.lang.String> getDeserializers() {
  return getDeserializersMap();
}

/**
 * <code>map&lt;int32, string&gt; deserializers = 4;</code>
 */
@java.lang.Override
public java.util.Map<java.lang.Integer, java.lang.String> getDeserializersMap() {
  return internalGetDeserializers().getMap();
}

/**
 * <code>map&lt;int32, string&gt; deserializers = 4;</code>
 *
 * @return the mapped value, or {@code defaultValue} when the key is absent.
 */
@java.lang.Override
public java.lang.String getDeserializersOrDefault(
    int key,
    java.lang.String defaultValue) {
  java.util.Map<java.lang.Integer, java.lang.String> map =
      internalGetDeserializers().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}

/**
 * <code>map&lt;int32, string&gt; deserializers = 4;</code>
 *
 * @throws java.lang.IllegalArgumentException when the key is absent.
 */
@java.lang.Override
public java.lang.String getDeserializersOrThrow(
    int key) {
  java.util.Map<java.lang.Integer, java.lang.String> map =
      internalGetDeserializers().getMap();
  if (!map.containsKey(key)) {
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}
public static final int BACKING_MAP_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap backingMap_;
/**
 * required .hbase.pb.BackingMap backing_map = 5;
 * @return Whether the backingMap field is set.
 */
@java.lang.Override
public boolean hasBackingMap() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * required .hbase.pb.BackingMap backing_map = 5;
 * @return The backingMap, or its default instance when unset (never null).
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap getBackingMap() {
return backingMap_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance() : backingMap_;
}
/**
 * required .hbase.pb.BackingMap backing_map = 5;
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder getBackingMapOrBuilder() {
return backingMap_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance() : backingMap_;
}
public static final int CHECKSUM_FIELD_NUMBER = 6;
private org.apache.hbase.thirdparty.com.google.protobuf.ByteString checksum_;
/**
 * optional bytes checksum = 6;
 * @return Whether the checksum field is set.
 */
@java.lang.Override
public boolean hasChecksum() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * optional bytes checksum = 6;
 * @return The checksum (ByteString.EMPTY when unset).
 */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getChecksum() {
return checksum_;
}
// Memoized initialization state: -1 = not computed, 0 = incomplete, 1 = complete.
private byte memoizedIsInitialized = -1;
/**
 * Verifies all proto2 required fields are present (cache_capacity, io_class,
 * map_class, backing_map) and that backing_map is itself initialized.
 * The result is cached.
 */
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasCacheCapacity()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasIoClass()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMapClass()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasBackingMap()) {
memoizedIsInitialized = 0;
return false;
}
// Recursively verify the required sub-message.
if (!getBackingMap().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes the set fields in field-number order: only fields whose presence
 * bit is set are written; map entries for field 4 are always emitted; unknown
 * fields are appended last.
 */
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt64(1, cacheCapacity_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, ioClass_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, mapClass_);
}
// Map field 4: serialized as repeated MapEntry messages with int keys.
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.serializeIntegerMapTo(
output,
internalGetDeserializers(),
DeserializersDefaultEntryHolder.defaultEntry,
4);
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(5, getBackingMap());
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeBytes(6, checksum_);
}
unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes) the serialized byte size. Mirrors writeTo: each set
 * field contributes its encoded size, each map entry of field 4 is sized as a
 * MapEntry message, and the unknown-field set is added at the end.
 */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt64Size(1, cacheCapacity_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, ioClass_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, mapClass_);
}
// Each map entry is sized as a standalone MapEntry message with field number 4.
for (java.util.Map.Entry entry
: internalGetDeserializers().getMap().entrySet()) {
org.apache.hbase.thirdparty.com.google.protobuf.MapEntry
deserializers__ = DeserializersDefaultEntryHolder.defaultEntry.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, deserializers__);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(5, getBackingMap());
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeBytesSize(6, checksum_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality: two messages are equal when each field has the
 * same presence and, if present, the same value, and their unknown-field
 * sets match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry) obj;
if (hasCacheCapacity() != other.hasCacheCapacity()) return false;
if (hasCacheCapacity()) {
if (getCacheCapacity()
!= other.getCacheCapacity()) return false;
}
if (hasIoClass() != other.hasIoClass()) return false;
if (hasIoClass()) {
if (!getIoClass()
.equals(other.getIoClass())) return false;
}
if (hasMapClass() != other.hasMapClass()) return false;
if (hasMapClass()) {
if (!getMapClass()
.equals(other.getMapClass())) return false;
}
if (!internalGetDeserializers().equals(
other.internalGetDeserializers())) return false;
if (hasBackingMap() != other.hasBackingMap()) return false;
if (hasBackingMap()) {
if (!getBackingMap()
.equals(other.getBackingMap())) return false;
}
if (hasChecksum() != other.hasChecksum()) return false;
if (hasChecksum()) {
if (!getChecksum()
.equals(other.getChecksum())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
/**
 * Memoized hash consistent with equals: folds in each present field, keyed by
 * its field number, plus the descriptor and unknown-field hashes.
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCacheCapacity()) {
hash = (37 * hash) + CACHE_CAPACITY_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getCacheCapacity());
}
if (hasIoClass()) {
hash = (37 * hash) + IO_CLASS_FIELD_NUMBER;
hash = (53 * hash) + getIoClass().hashCode();
}
if (hasMapClass()) {
hash = (37 * hash) + MAP_CLASS_FIELD_NUMBER;
hash = (53 * hash) + getMapClass().hashCode();
}
if (!internalGetDeserializers().getMap().isEmpty()) {
hash = (37 * hash) + DESERIALIZERS_FIELD_NUMBER;
hash = (53 * hash) + internalGetDeserializers().hashCode();
}
if (hasBackingMap()) {
hash = (37 * hash) + BACKING_MAP_FIELD_NUMBER;
hash = (53 * hash) + getBackingMap().hashCode();
}
if (hasChecksum()) {
hash = (37 * hash) + CHECKSUM_FIELD_NUMBER;
hash = (53 * hash) + getChecksum().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. All overloads delegate to PARSER;
// the (data, extensionRegistry) variants allow extension lookup during parse,
// and the *DelimitedFrom variants read a varint length prefix first.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream variants: IOExceptions from the stream are rethrown as-is.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Length-prefixed (delimited) variants for reading message streams.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Builders are always derived from the default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the given prototype's fields.
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids the mergeFrom cost when converting the default instance itself.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Runtime hook: creates a child builder attached to a parent message tree.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BucketCacheEntry}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.BucketCacheEntry)
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntryOrBuilder {
// Descriptor for the BucketCacheEntry message type (same as the message class).
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketCacheEntry_descriptor;
}
// Runtime hook: read-only view of a map field by field number (only field 4 is a map).
@SuppressWarnings({"rawtypes"})
protected org.apache.hbase.thirdparty.com.google.protobuf.MapField internalGetMapField(
int number) {
switch (number) {
case 4:
return internalGetDeserializers();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
// Runtime hook: mutable view of a map field by field number.
@SuppressWarnings({"rawtypes"})
protected org.apache.hbase.thirdparty.com.google.protobuf.MapField internalGetMutableMapField(
int number) {
switch (number) {
case 4:
return internalGetMutableDeserializers();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
// Runtime hook: binds generated field accessors to the message and builder classes.
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketCacheEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getBackingMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
cacheCapacity_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
ioClass_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
mapClass_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
internalGetMutableDeserializers().clear();
if (backingMapBuilder_ == null) {
backingMap_ = null;
} else {
backingMapBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
checksum_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketCacheEntry_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.cacheCapacity_ = cacheCapacity_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.ioClass_ = ioClass_;
if (((from_bitField0_ & 0x00000004) != 0)) {
to_bitField0_ |= 0x00000004;
}
result.mapClass_ = mapClass_;
result.deserializers_ = internalGetDeserializers();
result.deserializers_.makeImmutable();
if (((from_bitField0_ & 0x00000010) != 0)) {
if (backingMapBuilder_ == null) {
result.backingMap_ = backingMap_;
} else {
result.backingMap_ = backingMapBuilder_.build();
}
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
to_bitField0_ |= 0x00000010;
}
result.checksum_ = checksum_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry.getDefaultInstance()) return this;
if (other.hasCacheCapacity()) {
setCacheCapacity(other.getCacheCapacity());
}
if (other.hasIoClass()) {
bitField0_ |= 0x00000002;
ioClass_ = other.ioClass_;
onChanged();
}
if (other.hasMapClass()) {
bitField0_ |= 0x00000004;
mapClass_ = other.mapClass_;
onChanged();
}
internalGetMutableDeserializers().mergeFrom(
other.internalGetDeserializers());
if (other.hasBackingMap()) {
mergeBackingMap(other.getBackingMap());
}
if (other.hasChecksum()) {
setChecksum(other.getChecksum());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasCacheCapacity()) {
return false;
}
if (!hasIoClass()) {
return false;
}
if (!hasMapClass()) {
return false;
}
if (!hasBackingMap()) {
return false;
}
if (!getBackingMap().isInitialized()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private long cacheCapacity_ ;
/**
* required int64 cache_capacity = 1;
* @return Whether the cacheCapacity field is set.
*/
@java.lang.Override
public boolean hasCacheCapacity() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int64 cache_capacity = 1;
* @return The cacheCapacity.
*/
@java.lang.Override
public long getCacheCapacity() {
return cacheCapacity_;
}
/**
* required int64 cache_capacity = 1;
* @param value The cacheCapacity to set.
* @return This builder for chaining.
*/
public Builder setCacheCapacity(long value) {
bitField0_ |= 0x00000001;
cacheCapacity_ = value;
onChanged();
return this;
}
/**
* required int64 cache_capacity = 1;
* @return This builder for chaining.
*/
public Builder clearCacheCapacity() {
bitField0_ = (bitField0_ & ~0x00000001);
cacheCapacity_ = 0L;
onChanged();
return this;
}
private java.lang.Object ioClass_ = "";
/**
* required string io_class = 2;
* @return Whether the ioClass field is set.
*/
public boolean hasIoClass() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string io_class = 2;
* @return The ioClass.
*/
public java.lang.String getIoClass() {
java.lang.Object ref = ioClass_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ioClass_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string io_class = 2;
* @return The bytes for ioClass.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getIoClassBytes() {
java.lang.Object ref = ioClass_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ioClass_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* required string io_class = 2;
* @param value The ioClass to set.
* @return This builder for chaining.
*/
public Builder setIoClass(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
ioClass_ = value;
onChanged();
return this;
}
/**
* required string io_class = 2;
* @return This builder for chaining.
*/
public Builder clearIoClass() {
bitField0_ = (bitField0_ & ~0x00000002);
ioClass_ = getDefaultInstance().getIoClass();
onChanged();
return this;
}
/**
* required string io_class = 2;
* @param value The bytes for ioClass to set.
* @return This builder for chaining.
*/
public Builder setIoClassBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
ioClass_ = value;
onChanged();
return this;
}
private java.lang.Object mapClass_ = "";
/**
* required string map_class = 3;
* @return Whether the mapClass field is set.
*/
public boolean hasMapClass() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required string map_class = 3;
* @return The mapClass.
*/
public java.lang.String getMapClass() {
java.lang.Object ref = mapClass_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
mapClass_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string map_class = 3;
* @return The bytes for mapClass.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getMapClassBytes() {
java.lang.Object ref = mapClass_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
mapClass_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* required string map_class = 3;
* @param value The mapClass to set.
* @return This builder for chaining.
*/
public Builder setMapClass(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
mapClass_ = value;
onChanged();
return this;
}
/**
* required string map_class = 3;
* @return This builder for chaining.
*/
public Builder clearMapClass() {
bitField0_ = (bitField0_ & ~0x00000004);
mapClass_ = getDefaultInstance().getMapClass();
onChanged();
return this;
}
/**
* required string map_class = 3;
* @param value The bytes for mapClass to set.
* @return This builder for chaining.
*/
public Builder setMapClassBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
mapClass_ = value;
onChanged();
return this;
}
private org.apache.hbase.thirdparty.com.google.protobuf.MapField<
java.lang.Integer, java.lang.String> deserializers_;
private org.apache.hbase.thirdparty.com.google.protobuf.MapField
internalGetDeserializers() {
if (deserializers_ == null) {
return org.apache.hbase.thirdparty.com.google.protobuf.MapField.emptyMapField(
DeserializersDefaultEntryHolder.defaultEntry);
}
return deserializers_;
}
private org.apache.hbase.thirdparty.com.google.protobuf.MapField
internalGetMutableDeserializers() {
onChanged();;
if (deserializers_ == null) {
deserializers_ = org.apache.hbase.thirdparty.com.google.protobuf.MapField.newMapField(
DeserializersDefaultEntryHolder.defaultEntry);
}
if (!deserializers_.isMutable()) {
deserializers_ = deserializers_.copy();
}
return deserializers_;
}
public int getDeserializersCount() {
return internalGetDeserializers().getMap().size();
}
/**
* map<int32, string> deserializers = 4;
*/
@java.lang.Override
public boolean containsDeserializers(
int key) {
return internalGetDeserializers().getMap().containsKey(key);
}
/**
* Use {@link #getDeserializersMap()} instead.
*/
@java.lang.Override
@java.lang.Deprecated
public java.util.Map getDeserializers() {
return getDeserializersMap();
}
/**
* map<int32, string> deserializers = 4;
*/
@java.lang.Override
public java.util.Map getDeserializersMap() {
return internalGetDeserializers().getMap();
}
/**
* map<int32, string> deserializers = 4;
*/
@java.lang.Override
public java.lang.String getDeserializersOrDefault(
int key,
java.lang.String defaultValue) {
java.util.Map map =
internalGetDeserializers().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
* map<int32, string> deserializers = 4;
*/
@java.lang.Override
public java.lang.String getDeserializersOrThrow(
int key) {
java.util.Map map =
internalGetDeserializers().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public Builder clearDeserializers() {
internalGetMutableDeserializers().getMutableMap()
.clear();
return this;
}
/**
* map<int32, string> deserializers = 4;
*/
public Builder removeDeserializers(
int key) {
internalGetMutableDeserializers().getMutableMap()
.remove(key);
return this;
}
/**
* Use alternate mutation accessors instead.
*/
@java.lang.Deprecated
public java.util.Map
getMutableDeserializers() {
return internalGetMutableDeserializers().getMutableMap();
}
/**
* map<int32, string> deserializers = 4;
*/
public Builder putDeserializers(
int key,
java.lang.String value) {
if (value == null) { throw new java.lang.NullPointerException(); }
internalGetMutableDeserializers().getMutableMap()
.put(key, value);
return this;
}
/**
* map<int32, string> deserializers = 4;
*/
public Builder putAllDeserializers(
java.util.Map values) {
internalGetMutableDeserializers().getMutableMap()
.putAll(values);
return this;
}
private org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap backingMap_;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder> backingMapBuilder_;
/**
* required .hbase.pb.BackingMap backing_map = 5;
* @return Whether the backingMap field is set.
*/
public boolean hasBackingMap() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
* @return The backingMap.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap getBackingMap() {
if (backingMapBuilder_ == null) {
return backingMap_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance() : backingMap_;
} else {
return backingMapBuilder_.getMessage();
}
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
public Builder setBackingMap(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap value) {
if (backingMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
backingMap_ = value;
onChanged();
} else {
backingMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
public Builder setBackingMap(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder builderForValue) {
if (backingMapBuilder_ == null) {
backingMap_ = builderForValue.build();
onChanged();
} else {
backingMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
public Builder mergeBackingMap(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap value) {
if (backingMapBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0) &&
backingMap_ != null &&
backingMap_ != org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance()) {
backingMap_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.newBuilder(backingMap_).mergeFrom(value).buildPartial();
} else {
backingMap_ = value;
}
onChanged();
} else {
backingMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
public Builder clearBackingMap() {
if (backingMapBuilder_ == null) {
backingMap_ = null;
onChanged();
} else {
backingMapBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder getBackingMapBuilder() {
bitField0_ |= 0x00000010;
onChanged();
return getBackingMapFieldBuilder().getBuilder();
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder getBackingMapOrBuilder() {
if (backingMapBuilder_ != null) {
return backingMapBuilder_.getMessageOrBuilder();
} else {
return backingMap_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance() : backingMap_;
}
}
/**
* required .hbase.pb.BackingMap backing_map = 5;
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder>
getBackingMapFieldBuilder() {
if (backingMapBuilder_ == null) {
backingMapBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder>(
getBackingMap(),
getParentForChildren(),
isClean());
backingMap_ = null;
}
return backingMapBuilder_;
}
private org.apache.hbase.thirdparty.com.google.protobuf.ByteString checksum_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes checksum = 6;
* @return Whether the checksum field is set.
*/
@java.lang.Override
public boolean hasChecksum() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional bytes checksum = 6;
* @return The checksum.
*/
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getChecksum() {
return checksum_;
}
/**
* optional bytes checksum = 6;
* @param value The checksum to set.
* @return This builder for chaining.
*/
public Builder setChecksum(org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000020;
checksum_ = value;
onChanged();
return this;
}
/**
* optional bytes checksum = 6;
* @return This builder for chaining.
*/
public Builder clearChecksum() {
bitField0_ = (bitField0_ & ~0x00000020);
checksum_ = getDefaultInstance().getChecksum();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BucketCacheEntry)
}
// @@protoc_insertion_point(class_scope:hbase.pb.BucketCacheEntry)
// Singleton default instance; created eagerly in the static initializer.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// NOTE(review): the <BucketCacheEntry> type arguments on Parser/AbstractParser were
// stripped from this copy by an extraction step; restored to match protoc output.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<BucketCacheEntry>
    PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<BucketCacheEntry>() {
  @java.lang.Override
  public BucketCacheEntry parsePartialFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return new BucketCacheEntry(input, extensionRegistry);
  }
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<BucketCacheEntry> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<BucketCacheEntry> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Read-only accessors for {@code hbase.pb.BackingMap}, implemented by both the
 * message and its builder. NOTE(review): the generic type arguments on the
 * {@code List} return types were stripped from this copy by an extraction step;
 * restored to match standard protoc output.
 */
public interface BackingMapOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.BackingMap)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  /**
   * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry>
      getEntryList();
  /**
   * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry getEntry(int index);
  /**
   * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
   */
  int getEntryCount();
  /**
   * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder>
      getEntryOrBuilderList();
  /**
   * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder getEntryOrBuilder(
      int index);
}
/**
* Protobuf type {@code hbase.pb.BackingMap}
*/
@javax.annotation.Generated("proto") public static final class BackingMap extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.BackingMap)
BackingMapOrBuilder {
private static final long serialVersionUID = 0L;
// Use BackingMap.newBuilder() to construct.
private BackingMap(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) {
  super(builder);
}
// Default-instance constructor: starts with an empty, immutable entry list.
private BackingMap() {
  entry_ = java.util.Collections.emptyList();
}
// Runtime hook used by the protobuf library to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new BackingMap();
}
// Fields with tags not known to this schema version are preserved here.
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs from {@code input} until
 * end-of-stream (tag 0), collecting repeated field 1 ('entry') messages and
 * preserving unrecognized tags in the unknown-field set.
 * NOTE(review): the {@code <BackingMapEntry>} type argument on the ArrayList was
 * stripped from this copy by an extraction step; restored here. No logic changed.
 */
private BackingMap(
    org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          // Tag 10 = (field 1 << 3) | wiretype 2 (length-delimited message).
          // Allocate the mutable list lazily, on the first entry seen.
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            entry_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry>();
            mutable_bitField0_ |= 0x00000001;
          }
          entry_.add(
              input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.PARSER, extensionRegistry));
          break;
        }
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Seal the list (even on failure) so the partially-parsed message is immutable.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      entry_ = java.util.Collections.unmodifiableList(entry_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor for hbase.pb.BackingMap, resolved from the outer file descriptor.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMap_descriptor;
}
// Reflection table binding the descriptor's fields to this class's accessors.
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMap_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder.class);
}
public static final int ENTRY_FIELD_NUMBER = 1;
// NOTE(review): element-type generics below were stripped from this copy by an
// extraction step; restored to match standard protoc output. No logic changed.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry> entry_;
/**
 * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
 */
@java.lang.Override
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry> getEntryList() {
  return entry_;
}
/**
 * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder>
    getEntryOrBuilderList() {
  return entry_;
}
/**
 * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
 */
@java.lang.Override
public int getEntryCount() {
  return entry_.size();
}
/**
 * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry getEntry(int index) {
  return entry_.get(index);
}
/**
 * <code>repeated .hbase.pb.BackingMapEntry entry = 1;</code>
 */
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder getEntryOrBuilder(
    int index) {
  return entry_.get(index);
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // BackingMap itself has no required fields; it is initialized iff every
  // nested entry message is initialized.
  for (int i = 0; i < getEntryCount(); i++) {
    if (!getEntry(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes each repeated 'entry' message as field 1, then any unknown fields,
// preserving their original wire bytes.
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  for (int i = 0; i < entry_.size(); i++) {
    output.writeMessage(1, entry_.get(i));
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes) the exact serialized byte size; memoizedSize == -1
// means "not yet computed".
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < entry_.size(); i++) {
    size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, entry_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality: same entry list and same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap) obj;
  if (!getEntryList()
      .equals(other.getEntryList())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Memoized hash consistent with equals(); uses the standard protoc mixing
// constants (41/19/37/53/29) seeded with the descriptor hash.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getEntryCount() > 0) {
    hash = (37 * hash) + ENTRY_FIELD_NUMBER;
    hash = (53 * hash) + getEntryList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points for BackingMap, one per input source. The
// ByteBuffer/ByteString/byte[] overloads delegate straight to PARSER; the
// stream-based overloads route through GeneratedMessageV3 helpers that
// translate protobuf exceptions into IOExceptions.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(byte[] data)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    byte[] data,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    java.io.InputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants: the message is preceded by its varint-encoded length.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parseFrom(
    org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BackingMap}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.BackingMap)
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMap_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMap_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getEntryFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset the repeated entry field and clear its "mutable copy" bit.
if (entryBuilder_ == null) {
entry_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
entryBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMap_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap(this);
int from_bitField0_ = bitField0_;
if (entryBuilder_ == null) {
// Hand the (now frozen) list to the message; the builder drops its
// mutable-copy claim so a later mutation re-copies the list.
if (((bitField0_ & 0x00000001) != 0)) {
entry_ = java.util.Collections.unmodifiableList(entry_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.entry_ = entry_;
} else {
result.entry_ = entryBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap.getDefaultInstance()) return this;
if (entryBuilder_ == null) {
// Plain-list mode: share other's (immutable) list when ours is empty,
// otherwise copy-on-write and append.
if (!other.entry_.isEmpty()) {
if (entry_.isEmpty()) {
entry_ = other.entry_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEntryIsMutable();
entry_.addAll(other.entry_);
}
onChanged();
}
} else {
// Field-builder mode: if the builder is empty, discard it and adopt
// other's list directly, re-creating the builder only if forced.
if (!other.entry_.isEmpty()) {
if (entryBuilder_.isEmpty()) {
entryBuilder_.dispose();
entryBuilder_ = null;
entry_ = other.entry_;
bitField0_ = (bitField0_ & ~0x00000001);
entryBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getEntryFieldBuilder() : null;
} else {
entryBuilder_.addAllMessages(other.entry_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// BackingMap is initialized iff every entry is (entries have required fields).
for (int i = 0; i < getEntryCount(); i++) {
if (!getEntry(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure so the finally block can
// still merge the partial message.
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 of bitField0_ records whether entry_ is a private mutable
// copy owned by this builder (see ensureEntryIsMutable). Once
// getEntryFieldBuilder() is called, entryBuilder_ takes over and entry_ is
// nulled out; every accessor below branches on which mode is active.
private int bitField0_;
private java.util.List entry_ =
java.util.Collections.emptyList();
private void ensureEntryIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
entry_ = new java.util.ArrayList(entry_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder> entryBuilder_;
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public java.util.List getEntryList() {
if (entryBuilder_ == null) {
return java.util.Collections.unmodifiableList(entry_);
} else {
return entryBuilder_.getMessageList();
}
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public int getEntryCount() {
if (entryBuilder_ == null) {
return entry_.size();
} else {
return entryBuilder_.getCount();
}
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry getEntry(int index) {
if (entryBuilder_ == null) {
return entry_.get(index);
} else {
return entryBuilder_.getMessage(index);
}
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder setEntry(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry value) {
if (entryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntryIsMutable();
entry_.set(index, value);
onChanged();
} else {
entryBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder setEntry(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder builderForValue) {
if (entryBuilder_ == null) {
ensureEntryIsMutable();
entry_.set(index, builderForValue.build());
onChanged();
} else {
entryBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder addEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry value) {
if (entryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntryIsMutable();
entry_.add(value);
onChanged();
} else {
entryBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder addEntry(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry value) {
if (entryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntryIsMutable();
entry_.add(index, value);
onChanged();
} else {
entryBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder addEntry(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder builderForValue) {
if (entryBuilder_ == null) {
ensureEntryIsMutable();
entry_.add(builderForValue.build());
onChanged();
} else {
entryBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder addEntry(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder builderForValue) {
if (entryBuilder_ == null) {
ensureEntryIsMutable();
entry_.add(index, builderForValue.build());
onChanged();
} else {
entryBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder addAllEntry(
java.lang.Iterable values) {
if (entryBuilder_ == null) {
ensureEntryIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, entry_);
onChanged();
} else {
entryBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder clearEntry() {
if (entryBuilder_ == null) {
entry_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
entryBuilder_.clear();
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public Builder removeEntry(int index) {
if (entryBuilder_ == null) {
ensureEntryIsMutable();
entry_.remove(index);
onChanged();
} else {
entryBuilder_.remove(index);
}
return this;
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder getEntryBuilder(
int index) {
return getEntryFieldBuilder().getBuilder(index);
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder getEntryOrBuilder(
int index) {
if (entryBuilder_ == null) {
return entry_.get(index); } else {
return entryBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public java.util.List
getEntryOrBuilderList() {
if (entryBuilder_ != null) {
return entryBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(entry_);
}
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder addEntryBuilder() {
return getEntryFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.getDefaultInstance());
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder addEntryBuilder(
int index) {
return getEntryFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.getDefaultInstance());
}
/**
* repeated .hbase.pb.BackingMapEntry entry = 1;
*/
public java.util.List
getEntryBuilderList() {
return getEntryFieldBuilder().getBuilderList();
}
// Lazily switches the builder from plain-list mode to field-builder mode;
// after this, entry_ is null and entryBuilder_ owns the elements.
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder>
getEntryFieldBuilder() {
if (entryBuilder_ == null) {
entryBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder>(
entry_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
entry_ = null;
}
return entryBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BackingMap)
}
// @@protoc_insertion_point(class_scope:hbase.pb.BackingMap)
// Singleton default instance; also the prototype all builders start from.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated but kept public for compatibility with older callers;
// new code should use parser() instead of touching PARSER directly.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() {
@java.lang.Override
public BackingMap parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new BackingMap(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view of a hbase.pb.BackingMapEntry message: a required
// BlockCacheKey (field 1) paired with a required BucketEntry (field 2).
public interface BackingMapEntryOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.BackingMapEntry)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* required .hbase.pb.BlockCacheKey key = 1;
* @return Whether the key field is set.
*/
boolean hasKey();
/**
* required .hbase.pb.BlockCacheKey key = 1;
* @return The key.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey getKey();
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder getKeyOrBuilder();
/**
* required .hbase.pb.BucketEntry value = 2;
* @return Whether the value field is set.
*/
boolean hasValue();
/**
* required .hbase.pb.BucketEntry value = 2;
* @return The value.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry getValue();
/**
* required .hbase.pb.BucketEntry value = 2;
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder getValueOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.BackingMapEntry}
*/
@javax.annotation.Generated("proto") public static final class BackingMapEntry extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.BackingMapEntry)
BackingMapEntryOrBuilder {
private static final long serialVersionUID = 0L;
// Use BackingMapEntry.newBuilder() to construct.
private BackingMapEntry(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
private BackingMapEntry() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BackingMapEntry();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor: consumes tags until EOF (tag 0) or an
// unparseable unknown field; unrecognized fields are preserved in
// unknownFields rather than dropped.
private BackingMapEntry(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
// NOTE(review): generator artifact -- never read in this message (no
// repeated fields to track).
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// Tag 10 = field 1 (key), wire type 2: if key was already seen,
// merge the new occurrence into it per proto semantics.
case 10: {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) != 0)) {
subBuilder = key_.toBuilder();
}
key_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(key_);
key_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
// Tag 18 = field 2 (value), wire type 2: same merge-on-repeat rule.
case 18: {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) != 0)) {
subBuilder = value_.toBuilder();
}
value_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(value_);
value_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMapEntry_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMapEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder.class);
}
// has-bits: 0x00000001 = key set, 0x00000002 = value set.
private int bitField0_;
public static final int KEY_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey key_;
/**
* required .hbase.pb.BlockCacheKey key = 1;
* @return Whether the key field is set.
*/
@java.lang.Override
public boolean hasKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
* @return The key.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey getKey() {
// Never returns null: falls back to the default instance when unset.
return key_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance() : key_;
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder getKeyOrBuilder() {
return key_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance() : key_;
}
public static final int VALUE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry value_;
/**
* required .hbase.pb.BucketEntry value = 2;
* @return Whether the value field is set.
*/
@java.lang.Override
public boolean hasValue() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hbase.pb.BucketEntry value = 2;
* @return The value.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry getValue() {
// Never returns null: falls back to the default instance when unset.
return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance() : value_;
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder getValueOrBuilder() {
return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance() : value_;
}
// Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Both fields are 'required': presence and recursive initialization
// of each sub-message are checked.
if (!hasKey()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasValue()) {
memoizedIsInitialized = 0;
return false;
}
if (!getKey().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getValue().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Only set fields are serialized; unknown fields are re-emitted last.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getKey());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getValue());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized: -1 means not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getKey());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getValue());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry) obj;
// Field presence must match, then set fields compare by value;
// unknown fields participate in equality too.
if (hasKey() != other.hasKey()) return false;
if (hasKey()) {
if (!getKey()
.equals(other.getKey())) return false;
}
if (hasValue() != other.hasValue()) return false;
if (hasValue()) {
if (!getValue()
.equals(other.getValue())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; mixes descriptor, set fields (tagged by field number),
// and unknown fields -- consistent with equals() above.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasKey()) {
hash = (37 * hash) + KEY_FIELD_NUMBER;
hash = (53 * hash) + getKey().hashCode();
}
if (hasValue()) {
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated static parse entry points; see the BackingMap
// overloads above for the delegation pattern.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: all builders are derived from DEFAULT_INSTANCE.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom pass when this is the (empty) default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BackingMapEntry}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.BackingMapEntry)
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntryOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMapEntry_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMapEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getKeyFieldBuilder();
getValueFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset both singular message fields and clear their has-bits
// (0x00000001 = key, 0x00000002 = value).
if (keyBuilder_ == null) {
key_ = null;
} else {
keyBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (valueBuilder_ == null) {
value_ = null;
} else {
valueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BackingMapEntry_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry result = buildPartial();
// Unlike buildPartial(), build() rejects messages missing required fields.
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
if (keyBuilder_ == null) {
result.key_ = key_;
} else {
result.key_ = keyBuilder_.build();
}
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
if (valueBuilder_ == null) {
result.value_ = value_;
} else {
result.value_ = valueBuilder_.build();
}
to_bitField0_ |= 0x00000002;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry.getDefaultInstance()) return this;
if (other.hasKey()) {
mergeKey(other.getKey());
}
if (other.hasValue()) {
mergeValue(other.getValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasKey()) {
return false;
}
if (!hasValue()) {
return false;
}
if (!getKey().isInitialized()) {
return false;
}
if (!getValue().isInitialized()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey key_;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder> keyBuilder_;
/**
* required .hbase.pb.BlockCacheKey key = 1;
* @return Whether the key field is set.
*/
public boolean hasKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
* @return The key.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey getKey() {
if (keyBuilder_ == null) {
return key_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance() : key_;
} else {
return keyBuilder_.getMessage();
}
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
public Builder setKey(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey value) {
if (keyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
key_ = value;
onChanged();
} else {
keyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
public Builder setKey(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder builderForValue) {
if (keyBuilder_ == null) {
key_ = builderForValue.build();
onChanged();
} else {
keyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
public Builder mergeKey(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey value) {
if (keyBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
key_ != null &&
key_ != org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance()) {
key_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.newBuilder(key_).mergeFrom(value).buildPartial();
} else {
key_ = value;
}
onChanged();
} else {
keyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
public Builder clearKey() {
if (keyBuilder_ == null) {
key_ = null;
onChanged();
} else {
keyBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder getKeyBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getKeyFieldBuilder().getBuilder();
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder getKeyOrBuilder() {
if (keyBuilder_ != null) {
return keyBuilder_.getMessageOrBuilder();
} else {
return key_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance() : key_;
}
}
/**
* required .hbase.pb.BlockCacheKey key = 1;
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder>
getKeyFieldBuilder() {
if (keyBuilder_ == null) {
keyBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder>(
getKey(),
getParentForChildren(),
isClean());
key_ = null;
}
return keyBuilder_;
}
private org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry value_;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder> valueBuilder_;
/**
* required .hbase.pb.BucketEntry value = 2;
* @return Whether the value field is set.
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hbase.pb.BucketEntry value = 2;
* @return The value.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry getValue() {
if (valueBuilder_ == null) {
return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance() : value_;
} else {
return valueBuilder_.getMessage();
}
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
public Builder setValue(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry value) {
if (valueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
onChanged();
} else {
valueBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
public Builder setValue(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder builderForValue) {
if (valueBuilder_ == null) {
value_ = builderForValue.build();
onChanged();
} else {
valueBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
public Builder mergeValue(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry value) {
if (valueBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
value_ != null &&
value_ != org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance()) {
value_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.newBuilder(value_).mergeFrom(value).buildPartial();
} else {
value_ = value;
}
onChanged();
} else {
valueBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
public Builder clearValue() {
if (valueBuilder_ == null) {
value_ = null;
onChanged();
} else {
valueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder getValueBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getValueFieldBuilder().getBuilder();
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder getValueOrBuilder() {
if (valueBuilder_ != null) {
return valueBuilder_.getMessageOrBuilder();
} else {
return value_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance() : value_;
}
}
/**
* required .hbase.pb.BucketEntry value = 2;
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder>
getValueFieldBuilder() {
if (valueBuilder_ == null) {
valueBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder>(
getValue(),
getParentForChildren(),
isClean());
value_ = null;
}
return valueBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BackingMapEntry)
}
// @@protoc_insertion_point(class_scope:hbase.pb.BackingMapEntry)
// Singleton immutable default (all-fields-unset) instance of BackingMapEntry.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public parser field; prefer parser(). Delegates to the
// wire-format parsing constructor.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() {
@java.lang.Override
public BackingMapEntry parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new BackingMapEntry(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMapEntry getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor contract for hbase.pb.BlockCacheKey messages;
// implemented by both the immutable BlockCacheKey and its Builder.
public interface BlockCacheKeyOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.BlockCacheKey)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* required string hfilename = 1;
* @return Whether the hfilename field is set.
*/
boolean hasHfilename();
/**
* required string hfilename = 1;
* @return The hfilename.
*/
java.lang.String getHfilename();
/**
* required string hfilename = 1;
* @return The bytes for hfilename.
*/
org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getHfilenameBytes();
/**
* required int64 offset = 2;
* @return Whether the offset field is set.
*/
boolean hasOffset();
/**
* required int64 offset = 2;
* @return The offset.
*/
long getOffset();
/**
* required .hbase.pb.BlockType block_type = 3;
* @return Whether the blockType field is set.
*/
boolean hasBlockType();
/**
* required .hbase.pb.BlockType block_type = 3;
* @return The blockType.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType getBlockType();
/**
* required bool primary_replica_block = 4;
* @return Whether the primaryReplicaBlock field is set.
*/
boolean hasPrimaryReplicaBlock();
/**
* required bool primary_replica_block = 4;
* @return The primaryReplicaBlock.
*/
boolean getPrimaryReplicaBlock();
}
/**
* Protobuf type {@code hbase.pb.BlockCacheKey}
*/
@javax.annotation.Generated("proto") public static final class BlockCacheKey extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.BlockCacheKey)
BlockCacheKeyOrBuilder {
private static final long serialVersionUID = 0L;
// Use BlockCacheKey.newBuilder() to construct.
private BlockCacheKey(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// Default constructor: initializes string/enum fields to their proto defaults.
private BlockCacheKey() {
hfilename_ = "";
blockType_ = 0;
}
// Runtime hook used to create empty instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BlockCacheKey();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged fields until end of input
// (tag 0), setting the matching bitField0_ presence bit for each recognized
// field. Unrecognized fields, and enum numbers with no BlockType constant,
// are preserved in unknownFields rather than dropped.
private BlockCacheKey(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
// field 1 (hfilename), wire type 2: stored as ByteString, decoded lazily.
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
hfilename_ = bs;
break;
}
case 16: {
// field 2 (offset), varint.
bitField0_ |= 0x00000002;
offset_ = input.readInt64();
break;
}
case 24: {
// field 3 (block_type), enum: unknown numbers go to unknownFields.
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(3, rawValue);
} else {
bitField0_ |= 0x00000004;
blockType_ = rawValue;
}
break;
}
case 32: {
// field 4 (primary_replica_block), bool.
bitField0_ |= 0x00000008;
primaryReplicaBlock_ = input.readBool();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor and field-accessor-table lookups for reflection support.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BlockCacheKey_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BlockCacheKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder.class);
}
// Presence bits: 0x1=hfilename, 0x2=offset, 0x4=block_type, 0x8=primary_replica_block.
private int bitField0_;
public static final int HFILENAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily and the decoded
// String is cached back only when the bytes are valid UTF-8.
private volatile java.lang.Object hfilename_;
/**
* required string hfilename = 1;
* @return Whether the hfilename field is set.
*/
@java.lang.Override
public boolean hasHfilename() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string hfilename = 1;
* @return The hfilename.
*/
@java.lang.Override
public java.lang.String getHfilename() {
java.lang.Object ref = hfilename_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
hfilename_ = s;
}
return s;
}
}
/**
* required string hfilename = 1;
* @return The bytes for hfilename.
*/
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getHfilenameBytes() {
java.lang.Object ref = hfilename_;
if (ref instanceof java.lang.String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
hfilename_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
public static final int OFFSET_FIELD_NUMBER = 2;
// Block offset within the HFile (int64 on the wire).
private long offset_;
/**
* required int64 offset = 2;
* @return Whether the offset field is set.
*/
@java.lang.Override
public boolean hasOffset() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required int64 offset = 2;
* @return The offset.
*/
@java.lang.Override
public long getOffset() {
return offset_;
}
public static final int BLOCK_TYPE_FIELD_NUMBER = 3;
// Stored as the raw enum number; converted on access.
private int blockType_;
/**
* required .hbase.pb.BlockType block_type = 3;
* @return Whether the blockType field is set.
*/
@java.lang.Override public boolean hasBlockType() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required .hbase.pb.BlockType block_type = 3;
* @return The blockType.
*/
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType getBlockType() {
// Falls back to BlockType.data (number 0) if the stored number is unknown.
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType.valueOf(blockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType.data : result;
}
public static final int PRIMARY_REPLICA_BLOCK_FIELD_NUMBER = 4;
private boolean primaryReplicaBlock_;
/**
* required bool primary_replica_block = 4;
* @return Whether the primaryReplicaBlock field is set.
*/
@java.lang.Override
public boolean hasPrimaryReplicaBlock() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* required bool primary_replica_block = 4;
* @return The primaryReplicaBlock.
*/
@java.lang.Override
public boolean getPrimaryReplicaBlock() {
return primaryReplicaBlock_;
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// All four fields are proto2 'required', so each must be present.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasHfilename()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasOffset()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasBlockType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasPrimaryReplicaBlock()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bit is set, in field-number
// order, followed by any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, hfilename_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt64(2, offset_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeEnum(3, blockType_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeBool(4, primaryReplicaBlock_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size of the
// set fields plus unknown fields; must mirror writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, hfilename_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt64Size(2, offset_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(3, blockType_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeBoolSize(4, primaryReplicaBlock_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: each field must match in both presence and value,
// and the unknown field sets must be equal.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey) obj;
if (hasHfilename() != other.hasHfilename()) return false;
if (hasHfilename()) {
if (!getHfilename()
.equals(other.getHfilename())) return false;
}
if (hasOffset() != other.hasOffset()) return false;
if (hasOffset()) {
if (getOffset()
!= other.getOffset()) return false;
}
if (hasBlockType() != other.hasBlockType()) return false;
if (hasBlockType()) {
if (blockType_ != other.blockType_) return false;
}
if (hasPrimaryReplicaBlock() != other.hasPrimaryReplicaBlock()) return false;
if (hasPrimaryReplicaBlock()) {
if (getPrimaryReplicaBlock()
!= other.getPrimaryReplicaBlock()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash consistent with equals(): mixes the descriptor, each set
// field (tagged by its field number), and the unknown fields.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasHfilename()) {
hash = (37 * hash) + HFILENAME_FIELD_NUMBER;
hash = (53 * hash) + getHfilename().hashCode();
}
if (hasOffset()) {
hash = (37 * hash) + OFFSET_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getOffset());
}
if (hasBlockType()) {
hash = (37 * hash) + BLOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + blockType_;
}
if (hasPrimaryReplicaBlock()) {
hash = (37 * hash) + PRIMARY_REPLICA_BLOCK_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean(
getPrimaryReplicaBlock());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input source; all delegate
// to PARSER (optionally with an extension registry) and throw
// InvalidProtocolBufferException / IOException on malformed input.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Returns a fresh Builder for this message type; delegates to the static factory.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Creates a new Builder seeded from the immutable default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a Builder pre-populated with the fields of the given prototype.
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Converts this message back to a Builder; the default instance yields an
// empty Builder without the cost of a merge.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Runtime hook: creates a Builder attached to a parent for nested-builder support.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BlockCacheKey}
*/
// Builder for hbase.pb.BlockCacheKey. Field presence is tracked in bitField0_:
// bit 0 = hfilename, bit 1 = offset, bit 2 = block_type, bit 3 = primary_replica_block.
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.BlockCacheKey)
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKeyOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BlockCacheKey_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BlockCacheKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields in BlockCacheKey, so there are no nested field
// builders to force-create; the body is intentionally empty.
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its default value and clears its presence bit.
@java.lang.Override
public Builder clear() {
super.clear();
hfilename_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
offset_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
blockType_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
primaryReplicaBlock_ = false;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BlockCacheKey_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance();
}
// Builds the message, throwing if any required field is unset.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without checking required fields; copies values into the new message
// and translates the builder's presence bits into the message's bitField0_.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
// hfilename_ and blockType_ are copied unconditionally (only the has-bit is
// conditional); offset_ and primaryReplicaBlock_ copy only when present.
result.hfilename_ = hfilename_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.offset_ = offset_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
to_bitField0_ |= 0x00000004;
}
result.blockType_ = blockType_;
if (((from_bitField0_ & 0x00000008) != 0)) {
result.primaryReplicaBlock_ = primaryReplicaBlock_;
to_bitField0_ |= 0x00000008;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
// The overrides below delegate straight to GeneratedMessageV3.Builder's
// reflection-based implementations.
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when possible, otherwise falls back to the
// reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies each field that is set on `other` into this builder; fields unset on
// `other` are left untouched. Unknown fields are merged as well.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey.getDefaultInstance()) return this;
if (other.hasHfilename()) {
bitField0_ |= 0x00000001;
hfilename_ = other.hfilename_;
onChanged();
}
if (other.hasOffset()) {
setOffset(other.getOffset());
}
if (other.hasBlockType()) {
setBlockType(other.getBlockType());
}
if (other.hasPrimaryReplicaBlock()) {
setPrimaryReplicaBlock(other.getPrimaryReplicaBlock());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// All four fields are `required` in the proto definition, so each must be set.
@java.lang.Override
public final boolean isInitialized() {
if (!hasHfilename()) {
return false;
}
if (!hasOffset()) {
return false;
}
if (!hasBlockType()) {
return false;
}
if (!hasPrimaryReplicaBlock()) {
return false;
}
return true;
}
// Parses from a stream and merges into this builder. On parse failure, any
// partially-parsed message is still merged (finally block) before rethrowing.
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// Holds either a String or a ByteString (lazy UTF-8 decoding; see getHfilename).
private java.lang.Object hfilename_ = "";
/**
* required string hfilename = 1;
* @return Whether the hfilename field is set.
*/
public boolean hasHfilename() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string hfilename = 1;
* @return The hfilename.
*/
public java.lang.String getHfilename() {
java.lang.Object ref = hfilename_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
hfilename_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string hfilename = 1;
* @return The bytes for hfilename.
*/
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getHfilenameBytes() {
java.lang.Object ref = hfilename_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
hfilename_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
* required string hfilename = 1;
* @param value The hfilename to set.
* @return This builder for chaining.
*/
public Builder setHfilename(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
hfilename_ = value;
onChanged();
return this;
}
/**
* required string hfilename = 1;
* @return This builder for chaining.
*/
public Builder clearHfilename() {
bitField0_ = (bitField0_ & ~0x00000001);
hfilename_ = getDefaultInstance().getHfilename();
onChanged();
return this;
}
/**
* required string hfilename = 1;
* @param value The bytes for hfilename to set.
* @return This builder for chaining.
*/
public Builder setHfilenameBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
hfilename_ = value;
onChanged();
return this;
}
private long offset_ ;
/**
* required int64 offset = 2;
* @return Whether the offset field is set.
*/
@java.lang.Override
public boolean hasOffset() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required int64 offset = 2;
* @return The offset.
*/
@java.lang.Override
public long getOffset() {
return offset_;
}
/**
* required int64 offset = 2;
* @param value The offset to set.
* @return This builder for chaining.
*/
public Builder setOffset(long value) {
bitField0_ |= 0x00000002;
offset_ = value;
onChanged();
return this;
}
/**
* required int64 offset = 2;
* @return This builder for chaining.
*/
public Builder clearOffset() {
bitField0_ = (bitField0_ & ~0x00000002);
offset_ = 0L;
onChanged();
return this;
}
// Stored as the raw enum number; converted to BlockType in getBlockType().
private int blockType_ = 0;
/**
* required .hbase.pb.BlockType block_type = 3;
* @return Whether the blockType field is set.
*/
@java.lang.Override public boolean hasBlockType() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required .hbase.pb.BlockType block_type = 3;
* @return The blockType.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType getBlockType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType.valueOf(blockType_);
// Unrecognized numbers fall back to BlockType.data (enum number 0).
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType.data : result;
}
/**
* required .hbase.pb.BlockType block_type = 3;
* @param value The blockType to set.
* @return This builder for chaining.
*/
public Builder setBlockType(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
blockType_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hbase.pb.BlockType block_type = 3;
* @return This builder for chaining.
*/
public Builder clearBlockType() {
bitField0_ = (bitField0_ & ~0x00000004);
blockType_ = 0;
onChanged();
return this;
}
private boolean primaryReplicaBlock_ ;
/**
* required bool primary_replica_block = 4;
* @return Whether the primaryReplicaBlock field is set.
*/
@java.lang.Override
public boolean hasPrimaryReplicaBlock() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* required bool primary_replica_block = 4;
* @return The primaryReplicaBlock.
*/
@java.lang.Override
public boolean getPrimaryReplicaBlock() {
return primaryReplicaBlock_;
}
/**
* required bool primary_replica_block = 4;
* @param value The primaryReplicaBlock to set.
* @return This builder for chaining.
*/
public Builder setPrimaryReplicaBlock(boolean value) {
bitField0_ |= 0x00000008;
primaryReplicaBlock_ = value;
onChanged();
return this;
}
/**
* required bool primary_replica_block = 4;
* @return This builder for chaining.
*/
public Builder clearPrimaryReplicaBlock() {
bitField0_ = (bitField0_ & ~0x00000008);
primaryReplicaBlock_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BlockCacheKey)
}
// @@protoc_insertion_point(class_scope:hbase.pb.BlockCacheKey)
// Shared singleton default (all-fields-unset) instance, created at class load.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public parser field; prefer parser(). Each call delegates to the
// stream-based message constructor.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() {
@java.lang.Override
public BlockCacheKey parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new BlockCacheKey(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface for hbase.pb.BucketEntry. All five fields are
// `required` in the proto definition; each has a has-/get- accessor pair.
public interface BucketEntryOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.BucketEntry)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* required int64 offset = 1;
* @return Whether the offset field is set.
*/
boolean hasOffset();
/**
* required int64 offset = 1;
* @return The offset.
*/
long getOffset();
/**
* required int32 length = 2;
* @return Whether the length field is set.
*/
boolean hasLength();
/**
* required int32 length = 2;
* @return The length.
*/
int getLength();
/**
* required int64 access_counter = 3;
* @return Whether the accessCounter field is set.
*/
boolean hasAccessCounter();
/**
* required int64 access_counter = 3;
* @return The accessCounter.
*/
long getAccessCounter();
/**
* required int32 deserialiser_index = 4;
* @return Whether the deserialiserIndex field is set.
*/
boolean hasDeserialiserIndex();
/**
* required int32 deserialiser_index = 4;
* @return The deserialiserIndex.
*/
int getDeserialiserIndex();
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return Whether the priority field is set.
*/
boolean hasPriority();
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return The priority.
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority getPriority();
}
/**
* Protobuf type {@code hbase.pb.BucketEntry}
*/
@javax.annotation.Generated("proto") public static final class BucketEntry extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.BucketEntry)
BucketEntryOrBuilder {
private static final long serialVersionUID = 0L;
// Use BucketEntry.newBuilder() to construct.
private BucketEntry(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// No-arg constructor; priority_ starts at raw enum number 0.
private BucketEntry() {
priority_ = 0;
}
// Hook used by the protobuf runtime to create instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BucketEntry();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until EOF (tag 0); each case
// value encodes (field_number << 3) | wire_type, so 8/16/24/32 are varint
// fields 1-4 and 40 is the enum field 5. Unmatched tags and unrecognized enum
// numbers are preserved in unknownFields rather than dropped.
private BucketEntry(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
// field 1: offset (int64)
bitField0_ |= 0x00000001;
offset_ = input.readInt64();
break;
}
case 16: {
// field 2: length (int32)
bitField0_ |= 0x00000002;
length_ = input.readInt32();
break;
}
case 24: {
// field 3: access_counter (int64)
bitField0_ |= 0x00000004;
accessCounter_ = input.readInt64();
break;
}
case 32: {
// field 4: deserialiser_index (int32)
bitField0_ |= 0x00000008;
deserialiserIndex_ = input.readInt32();
break;
}
case 40: {
// field 5: priority (enum); unknown numbers go to unknownFields.
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority value = org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(5, rawValue);
} else {
bitField0_ |= 0x00000010;
priority_ = rawValue;
}
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always freeze whatever was parsed, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor / field-accessor plumbing backing reflective access to BucketEntry.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketEntry_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder.class);
}
// Presence bits: bit 0 = offset, bit 1 = length, bit 2 = access_counter,
// bit 3 = deserialiser_index, bit 4 = priority.
private int bitField0_;
public static final int OFFSET_FIELD_NUMBER = 1;
private long offset_;
/**
* required int64 offset = 1;
* @return Whether the offset field is set.
*/
@java.lang.Override
public boolean hasOffset() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int64 offset = 1;
* @return The offset.
*/
@java.lang.Override
public long getOffset() {
return offset_;
}
public static final int LENGTH_FIELD_NUMBER = 2;
private int length_;
/**
* required int32 length = 2;
* @return Whether the length field is set.
*/
@java.lang.Override
public boolean hasLength() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required int32 length = 2;
* @return The length.
*/
@java.lang.Override
public int getLength() {
return length_;
}
public static final int ACCESS_COUNTER_FIELD_NUMBER = 3;
private long accessCounter_;
/**
* required int64 access_counter = 3;
* @return Whether the accessCounter field is set.
*/
@java.lang.Override
public boolean hasAccessCounter() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required int64 access_counter = 3;
* @return The accessCounter.
*/
@java.lang.Override
public long getAccessCounter() {
return accessCounter_;
}
public static final int DESERIALISER_INDEX_FIELD_NUMBER = 4;
private int deserialiserIndex_;
/**
* required int32 deserialiser_index = 4;
* @return Whether the deserialiserIndex field is set.
*/
@java.lang.Override
public boolean hasDeserialiserIndex() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* required int32 deserialiser_index = 4;
* @return The deserialiserIndex.
*/
@java.lang.Override
public int getDeserialiserIndex() {
return deserialiserIndex_;
}
public static final int PRIORITY_FIELD_NUMBER = 5;
// Stored as the raw enum number; converted to BlockPriority in getPriority().
private int priority_;
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return Whether the priority field is set.
*/
@java.lang.Override public boolean hasPriority() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return The priority.
*/
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority getPriority() {
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority result = org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority.valueOf(priority_);
// Unrecognized numbers fall back to BlockPriority.single.
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority.single : result;
}
// Memoized result: -1 = not computed yet, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// All five fields are `required`, so initialization means all five are set.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasOffset()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasLength()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasAccessCounter()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasDeserialiserIndex()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasPriority()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt64(1, offset_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(2, length_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeInt64(3, accessCounter_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeInt32(4, deserialiserIndex_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeEnum(5, priority_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size; mirrors
// the field-by-field logic of writeTo.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt64Size(1, offset_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt32Size(2, length_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt64Size(3, accessCounter_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeInt32Size(4, deserialiserIndex_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(5, priority_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Equality requires matching presence AND value for every field, plus equal
// unknown-field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry) obj;
if (hasOffset() != other.hasOffset()) return false;
if (hasOffset()) {
if (getOffset()
!= other.getOffset()) return false;
}
if (hasLength() != other.hasLength()) return false;
if (hasLength()) {
if (getLength()
!= other.getLength()) return false;
}
if (hasAccessCounter() != other.hasAccessCounter()) return false;
if (hasAccessCounter()) {
if (getAccessCounter()
!= other.getAccessCounter()) return false;
}
if (hasDeserialiserIndex() != other.hasDeserialiserIndex()) return false;
if (hasDeserialiserIndex()) {
if (getDeserialiserIndex()
!= other.getDeserialiserIndex()) return false;
}
if (hasPriority() != other.hasPriority()) return false;
if (hasPriority()) {
// Enum compared by raw number, not by converted enum constant.
if (priority_ != other.priority_) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash over descriptor, each set field (number + value), and unknown
// fields; consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasOffset()) {
hash = (37 * hash) + OFFSET_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getOffset());
}
if (hasLength()) {
hash = (37 * hash) + LENGTH_FIELD_NUMBER;
hash = (53 * hash) + getLength();
}
if (hasAccessCounter()) {
hash = (37 * hash) + ACCESS_COUNTER_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getAccessCounter());
}
if (hasDeserialiserIndex()) {
hash = (37 * hash) + DESERIALISER_INDEX_FIELD_NUMBER;
hash = (53 * hash) + getDeserialiserIndex();
}
if (hasPriority()) {
hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
hash = (53 * hash) + priority_;
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads: ByteBuffer / ByteString / byte[]
// variants delegate to PARSER; stream variants go through the
// GeneratedMessageV3 helpers, which translate parse failures into IOExceptions.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Creates a fresh builder seeded from the shared default (empty) instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the fields of the given prototype.
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skips the mergeFrom copy when this is the shared default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BucketEntry}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hbase.pb.BucketEntry)
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntryOrBuilder {
// Descriptor / field-accessor plumbing for the BucketEntry builder.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketEntry_descriptor;
}
@java.lang.Override
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields in BucketEntry, so there are no nested field
// builders to force-create; the body is intentionally empty.
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its default value and clears its presence bit
// (bit 0 = offset, 1 = length, 2 = access_counter, 3 = deserialiser_index,
// 4 = priority).
@java.lang.Override
public Builder clear() {
super.clear();
offset_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
length_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
accessCounter_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
deserialiserIndex_ = 0;
bitField0_ = (bitField0_ & ~0x00000008);
priority_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.internal_static_hbase_pb_BucketEntry_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance();
}
// Builds the message, throwing if any required field is unset.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without checking required fields; copies values into the new message
// and translates the builder's presence bits into the message's bitField0_.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.offset_ = offset_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.length_ = length_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.accessCounter_ = accessCounter_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.deserialiserIndex_ = deserialiserIndex_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
to_bitField0_ |= 0x00000010;
}
// priority_ is copied unconditionally; only its has-bit is conditional.
result.priority_ = priority_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
// Covariant override narrowing the return type; delegates to GeneratedMessageV3.Builder.
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
// Reflective field setter; narrows the return type to this Builder.
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
// Reflective field clearer; narrows the return type to this Builder.
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
// BucketEntry declares no oneofs; kept for the reflective Message.Builder contract.
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
// BucketEntry has no repeated fields; kept for the reflective Message.Builder contract.
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
// BucketEntry has no repeated fields; kept for the reflective Message.Builder contract.
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
// Fast path for the concrete type; otherwise fall back to the generic
// descriptor-driven merge in the superclass.
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Type-specific merge: for each field set on {@code other}, overwrite the
// corresponding value here (last-writer-wins, standard proto2 scalar merge).
// Merging the default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry.getDefaultInstance()) return this;
if (other.hasOffset()) {
setOffset(other.getOffset());
}
if (other.hasLength()) {
setLength(other.getLength());
}
if (other.hasAccessCounter()) {
setAccessCounter(other.getAccessCounter());
}
if (other.hasDeserialiserIndex()) {
setDeserialiserIndex(other.getDeserialiserIndex());
}
if (other.hasPriority()) {
setPriority(other.getPriority());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// All five BucketEntry fields are proto2 'required'; the message is
// initialized only when every has-bit is set.
if (!hasOffset()) {
return false;
}
if (!hasLength()) {
return false;
}
if (!hasAccessCounter()) {
return false;
}
if (!hasDeserialiserIndex()) {
return false;
}
if (!hasPriority()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Parse a BucketEntry from the wire and merge it into this builder.
// On parse failure, the partially-parsed message (if any) is still merged
// in the finally block before the exception is rethrown as an IOException.
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bitmap for this builder: bits 0x01..0x10 track fields 1..5.
private int bitField0_;
// Staged value for field 1 (offset); valid only when bit 0x01 is set.
private long offset_ ;
/**
* required int64 offset = 1;
* @return Whether the offset field is set.
*/
@java.lang.Override
public boolean hasOffset() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required int64 offset = 1;
* @return The offset.
*/
@java.lang.Override
public long getOffset() {
return offset_;
}
/**
* required int64 offset = 1;
* @param value The offset to set.
* @return This builder for chaining.
*/
public Builder setOffset(long value) {
bitField0_ |= 0x00000001;
offset_ = value;
onChanged();
return this;
}
/**
* required int64 offset = 1;
* @return This builder for chaining.
*/
public Builder clearOffset() {
bitField0_ = (bitField0_ & ~0x00000001);
offset_ = 0L;
onChanged();
return this;
}
// Staged value for field 2 (length); valid only when bit 0x02 is set.
private int length_ ;
/**
* required int32 length = 2;
* @return Whether the length field is set.
*/
@java.lang.Override
public boolean hasLength() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required int32 length = 2;
* @return The length.
*/
@java.lang.Override
public int getLength() {
return length_;
}
/**
* required int32 length = 2;
* @param value The length to set.
* @return This builder for chaining.
*/
public Builder setLength(int value) {
bitField0_ |= 0x00000002;
length_ = value;
onChanged();
return this;
}
/**
* required int32 length = 2;
* @return This builder for chaining.
*/
public Builder clearLength() {
bitField0_ = (bitField0_ & ~0x00000002);
length_ = 0;
onChanged();
return this;
}
// Staged value for field 3 (access_counter); valid only when bit 0x04 is set.
private long accessCounter_ ;
/**
* required int64 access_counter = 3;
* @return Whether the accessCounter field is set.
*/
@java.lang.Override
public boolean hasAccessCounter() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required int64 access_counter = 3;
* @return The accessCounter.
*/
@java.lang.Override
public long getAccessCounter() {
return accessCounter_;
}
/**
* required int64 access_counter = 3;
* @param value The accessCounter to set.
* @return This builder for chaining.
*/
public Builder setAccessCounter(long value) {
bitField0_ |= 0x00000004;
accessCounter_ = value;
onChanged();
return this;
}
/**
* required int64 access_counter = 3;
* @return This builder for chaining.
*/
public Builder clearAccessCounter() {
bitField0_ = (bitField0_ & ~0x00000004);
accessCounter_ = 0L;
onChanged();
return this;
}
// Staged value for field 4 (deserialiser_index); valid only when bit 0x08 is set.
private int deserialiserIndex_ ;
/**
* required int32 deserialiser_index = 4;
* @return Whether the deserialiserIndex field is set.
*/
@java.lang.Override
public boolean hasDeserialiserIndex() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* required int32 deserialiser_index = 4;
* @return The deserialiserIndex.
*/
@java.lang.Override
public int getDeserialiserIndex() {
return deserialiserIndex_;
}
/**
* required int32 deserialiser_index = 4;
* @param value The deserialiserIndex to set.
* @return This builder for chaining.
*/
public Builder setDeserialiserIndex(int value) {
bitField0_ |= 0x00000008;
deserialiserIndex_ = value;
onChanged();
return this;
}
/**
* required int32 deserialiser_index = 4;
* @return This builder for chaining.
*/
public Builder clearDeserialiserIndex() {
bitField0_ = (bitField0_ & ~0x00000008);
deserialiserIndex_ = 0;
onChanged();
return this;
}
// Staged raw enum number for field 5 (priority); valid only when bit 0x10 is set.
// 0 is the number of BlockPriority.single, the proto2 default.
private int priority_ = 0;
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return Whether the priority field is set.
*/
@java.lang.Override public boolean hasPriority() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return The priority.
*/
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority getPriority() {
// valueOf(int) is deprecated in newer protobuf in favor of forNumber, but
// generated code still calls it; unknown numbers fall back to 'single'.
@SuppressWarnings("deprecation")
org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority result = org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority.valueOf(priority_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority.single : result;
}
/**
* required .hbase.pb.BlockPriority priority = 5;
* @param value The priority to set.
* @return This builder for chaining.
*/
public Builder setPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
priority_ = value.getNumber();
onChanged();
return this;
}
/**
* required .hbase.pb.BlockPriority priority = 5;
* @return This builder for chaining.
*/
public Builder clearPriority() {
bitField0_ = (bitField0_ & ~0x00000010);
priority_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
// Replaces (rather than merges) the unknown-field set; narrows the return type.
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
// Merges into the existing unknown-field set; narrows the return type.
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BucketEntry)
}
// @@protoc_insertion_point(class_scope:hbase.pb.BucketEntry)
// Singleton default instance: all fields unset. Initialized in a static
// block so it is created exactly once at class-load time.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public parser constant (use parser() instead); note it is
// declared with the raw Parser/AbstractParser types here, so parser() and
// getParserForType() return it unchecked.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() {
@java.lang.Override
public BucketEntry parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
// Delegates to the parsing constructor; "partial" = required fields not enforced.
return new BucketEntry(input, extensionRegistry);
}
};
// Preferred accessor for the message parser.
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() {
// Instance-level accessor required by the Message interface; same shared PARSER.
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry getDefaultInstanceForType() {
// Instance-level accessor required by the Message interface.
return DEFAULT_INSTANCE;
}
}
// Per-message descriptors and reflective field-accessor tables for every
// message in BucketCacheEntry.proto. All are populated once by the static
// initializer below; the generated message classes read them for reflection.
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BucketCacheEntry_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_BucketCacheEntry_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BucketCacheEntry_DeserializersEntry_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_BucketCacheEntry_DeserializersEntry_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BackingMap_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_BackingMap_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BackingMapEntry_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_BackingMapEntry_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BlockCacheKey_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_BlockCacheKey_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BucketEntry_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_BucketEntry_fieldAccessorTable;
// File-level descriptor for BucketCacheEntry.proto.
public static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// descriptorData is the serialized FileDescriptorProto for
// BucketCacheEntry.proto, embedded as an escaped Java string. Do not edit:
// offsets and field names below must match it exactly.
java.lang.String[] descriptorData = {
"\n\026BucketCacheEntry.proto\022\010hbase.pb\"\210\002\n\020B" +
"ucketCacheEntry\022\026\n\016cache_capacity\030\001 \002(\003\022" +
"\020\n\010io_class\030\002 \002(\t\022\021\n\tmap_class\030\003 \002(\t\022D\n\r" +
"deserializers\030\004 \003(\0132-.hbase.pb.BucketCac" +
"heEntry.DeserializersEntry\022)\n\013backing_ma" +
"p\030\005 \002(\0132\024.hbase.pb.BackingMap\022\020\n\010checksu" +
"m\030\006 \001(\014\0324\n\022DeserializersEntry\022\013\n\003key\030\001 \001" +
"(\005\022\r\n\005value\030\002 \001(\t:\0028\001\"6\n\nBackingMap\022(\n\005e" +
"ntry\030\001 \003(\0132\031.hbase.pb.BackingMapEntry\"]\n" +
"\017BackingMapEntry\022$\n\003key\030\001 \002(\0132\027.hbase.pb" +
".BlockCacheKey\022$\n\005value\030\002 \002(\0132\025.hbase.pb" +
".BucketEntry\"z\n\rBlockCacheKey\022\021\n\thfilena" +
"me\030\001 \002(\t\022\016\n\006offset\030\002 \002(\003\022\'\n\nblock_type\030\003" +
" \002(\0162\023.hbase.pb.BlockType\022\035\n\025primary_rep" +
"lica_block\030\004 \002(\010\"\214\001\n\013BucketEntry\022\016\n\006offs" +
"et\030\001 \002(\003\022\016\n\006length\030\002 \002(\005\022\026\n\016access_count" +
"er\030\003 \002(\003\022\032\n\022deserialiser_index\030\004 \002(\005\022)\n\010" +
"priority\030\005 \002(\0162\027.hbase.pb.BlockPriority*" +
"\332\001\n\tBlockType\022\010\n\004data\020\000\022\020\n\014encoded_data\020" +
"\001\022\016\n\nleaf_index\020\002\022\017\n\013bloom_chunk\020\003\022\010\n\004me" +
"ta\020\004\022\026\n\022intermediate_index\020\005\022\016\n\nroot_ind" +
"ex\020\006\022\r\n\tfile_info\020\007\022\026\n\022general_bloom_met" +
"a\020\010\022\034\n\030delete_family_bloom_meta\020\t\022\013\n\007tra" +
"iler\020\n\022\014\n\010index_v1\020\013*2\n\rBlockPriority\022\n\n" +
"\006single\020\000\022\t\n\005multi\020\001\022\n\n\006memory\020\002BN\n1org." +
"apache.hadoop.hbase.shaded.protobuf.gene" +
"ratedB\021BucketCacheProtosH\001\210\001\001\240\001\001"
};
// Build the file descriptor (no proto dependencies), then wire up each
// message's descriptor (by declaration index) and its field-accessor table
// (camel-cased field names in declaration order).
descriptor = org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor[] {
});
internal_static_hbase_pb_BucketCacheEntry_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_BucketCacheEntry_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_BucketCacheEntry_descriptor,
new java.lang.String[] { "CacheCapacity", "IoClass", "MapClass", "Deserializers", "BackingMap", "Checksum", });
// DeserializersEntry is the synthetic map-entry message nested in BucketCacheEntry.
internal_static_hbase_pb_BucketCacheEntry_DeserializersEntry_descriptor =
internal_static_hbase_pb_BucketCacheEntry_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_BucketCacheEntry_DeserializersEntry_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_BucketCacheEntry_DeserializersEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_hbase_pb_BackingMap_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_BackingMap_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_BackingMap_descriptor,
new java.lang.String[] { "Entry", });
internal_static_hbase_pb_BackingMapEntry_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_BackingMapEntry_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_BackingMapEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_hbase_pb_BlockCacheKey_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hbase_pb_BlockCacheKey_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_BlockCacheKey_descriptor,
new java.lang.String[] { "Hfilename", "Offset", "BlockType", "PrimaryReplicaBlock", });
internal_static_hbase_pb_BucketEntry_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hbase_pb_BucketEntry_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_BucketEntry_descriptor,
new java.lang.String[] { "Offset", "Length", "AccessCounter", "DeserialiserIndex", "Priority", });
}
// @@protoc_insertion_point(outer_class_scope)
}