org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos
Protobuf protocol classes used by HBase to communicate.
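A minimal usage sketch (not part of the generated file, assuming the hbase-protocol artifact and the protobuf-java runtime on the classpath): build a ClusterId with the generated Builder, round-trip it through the wire format, and read the field back.

import java.util.UUID;
import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;

public class ClusterIdExample {
  public static void main(String[] args) throws Exception {
    // cluster_id is a required field; build() throws an unchecked
    // UninitializedMessageException if it is left unset.
    ClusterIdProtos.ClusterId id = ClusterIdProtos.ClusterId.newBuilder()
        .setClusterId(UUID.randomUUID().toString())
        .build();

    // Serialize to the wire format and parse back.
    byte[] bytes = id.toByteArray();
    ClusterIdProtos.ClusterId parsed =
        ClusterIdProtos.ClusterId.parseFrom(bytes);
    System.out.println(parsed.getClusterId());
  }
}

The generated source follows.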
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: ClusterId.proto
package org.apache.hadoop.hbase.protobuf.generated;
@javax.annotation.Generated("proto")
public final class ClusterIdProtos {
private ClusterIdProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface ClusterIdOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string cluster_id = 1;
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
boolean hasClusterId();
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
java.lang.String getClusterId();
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
com.google.protobuf.ByteString
getClusterIdBytes();
}
/**
 * Protobuf type {@code hbase.pb.ClusterId}
 *
 * <pre>
 **
 * Content of the '/hbase/hbaseid', cluster id, znode.
 * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file.
 * </pre>
 */
@javax.annotation.Generated("proto")
public static final class ClusterId extends
com.google.protobuf.GeneratedMessage
implements ClusterIdOrBuilder {
// Use ClusterId.newBuilder() to construct.
private ClusterId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ClusterId defaultInstance;
public static ClusterId getDefaultInstance() {
return defaultInstance;
}
public ClusterId getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
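// Parsing constructor: consumes the input tag-by-tag. Tag 10 is field 1
// (cluster_id) with wire type 2 (length-delimited); tag 0 marks end of
// input, and any unrecognized tag is preserved in unknownFields.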
private ClusterId(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
public static com.google.protobuf.Parser<ClusterId> PARSER =
new com.google.protobuf.AbstractParser<ClusterId>() {
public ClusterId parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ClusterId(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ClusterId> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string cluster_id = 1;
public static final int CLUSTER_ID_FIELD_NUMBER = 1;
private java.lang.Object clusterId_;
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
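// cluster_id is held internally as either a String or a ByteString; the
// getters below convert lazily and cache the decoded form when possible.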
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clusterId_ = s;
}
return s;
}
}
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
clusterId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasClusterId()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getClusterIdBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getClusterIdBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) obj;
boolean result = true;
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
result = result && getClusterId()
.equals(other.getClusterId());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasClusterId()) {
hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.ClusterId}
 *
 * <pre>
 **
 * Content of the '/hbase/hbaseid', cluster id, znode.
 * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file.
 * </pre>
 */
@javax.annotation.Generated("proto")
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
clusterId_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId build() {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.clusterId_ = clusterId_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this;
if (other.hasClusterId()) {
bitField0_ |= 0x00000001;
clusterId_ = other.clusterId_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasClusterId()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string cluster_id = 1;
private java.lang.Object clusterId_ = "";
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
clusterId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public Builder setClusterId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
clusterId_ = value;
onChanged();
return this;
}
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public Builder clearClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
clusterId_ = getDefaultInstance().getClusterId();
onChanged();
return this;
}
/**
 * <code>required string cluster_id = 1;</code>
 *
 * <pre>
 * This is the cluster id, a uuid as a String
 * </pre>
 */
public Builder setClusterIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
clusterId_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.ClusterId)
}
static {
defaultInstance = new ClusterId(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.ClusterId)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ClusterId_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ClusterId_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
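// The escaped string below is the serialized FileDescriptorProto for
// ClusterId.proto; the runtime descriptor and field accessor table are
// rebuilt from it when this class is loaded.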
static {
java.lang.String[] descriptorData = {
"\n\017ClusterId.proto\022\010hbase.pb\"\037\n\tClusterId" +
"\022\022\n\ncluster_id\030\001 \002(\tBB\n*org.apache.hadoo" +
"p.hbase.protobuf.generatedB\017ClusterIdPro" +
"tosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_hbase_pb_ClusterId_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_ClusterId_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ClusterId_descriptor,
new java.lang.String[] { "ClusterId", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
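For stream transports, the generated parseDelimitedFrom above pairs with writeDelimitedTo from the protobuf runtime, which prefixes each message with its varint-encoded length so several messages can share one stream. A hedged sketch, with in-memory streams standing in for a real transport:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;

public class ClusterIdStreamExample {
  public static void main(String[] args) throws Exception {
    ClusterIdProtos.ClusterId id = ClusterIdProtos.ClusterId.newBuilder()
        .setClusterId("illustrative-cluster-id")  // any UUID string in practice
        .build();

    // Length-prefixed write, then a matching delimited read.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    id.writeDelimitedTo(out);
    ClusterIdProtos.ClusterId parsed = ClusterIdProtos.ClusterId
        .parseDelimitedFrom(new ByteArrayInputStream(out.toByteArray()));
    System.out.println(parsed.getClusterId());
  }
}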