cz.proto.HashCluster

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: table_common.proto

package cz.proto;

/**
 * Protobuf type {@code cz.proto.HashCluster}
 */
public final class HashCluster extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:cz.proto.HashCluster)
    HashClusterOrBuilder {
private static final long serialVersionUID = 0L;
  // Use HashCluster.newBuilder() to construct.
  private HashCluster(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private HashCluster() {
    bucketType_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new HashCluster();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  private HashCluster(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8: {

            functionVersion_ = input.readUInt32();
            break;
          }
          case 16: {
            int rawValue = input.readEnum();

            bucketType_ = rawValue;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return cz.proto.TableCommon.internal_static_cz_proto_HashCluster_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return cz.proto.TableCommon.internal_static_cz_proto_HashCluster_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            cz.proto.HashCluster.class, cz.proto.HashCluster.Builder.class);
  }

  public static final int FUNCTION_VERSION_FIELD_NUMBER = 1;
  private int functionVersion_;
  /**
   * uint32 function_version = 1;
   * @return The functionVersion.
   */
  @java.lang.Override
  public int getFunctionVersion() {
    return functionVersion_;
  }

  public static final int BUCKET_TYPE_FIELD_NUMBER = 2;
  private int bucketType_;
  /**
   * .cz.proto.HashBucketType bucket_type = 2;
   * @return The enum numeric value on the wire for bucketType.
   */
  @java.lang.Override public int getBucketTypeValue() {
    return bucketType_;
  }
  /**
   * .cz.proto.HashBucketType bucket_type = 2;
   * @return The bucketType.
   */
  @java.lang.Override public cz.proto.HashBucketType getBucketType() {
    @SuppressWarnings("deprecation")
    cz.proto.HashBucketType result = cz.proto.HashBucketType.valueOf(bucketType_);
    return result == null ? cz.proto.HashBucketType.UNRECOGNIZED : result;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (functionVersion_ != 0) {
      output.writeUInt32(1, functionVersion_);
    }
    if (bucketType_ != cz.proto.HashBucketType.HASH_MOD.getNumber()) {
      output.writeEnum(2, bucketType_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (functionVersion_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeUInt32Size(1, functionVersion_);
    }
    if (bucketType_ != cz.proto.HashBucketType.HASH_MOD.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, bucketType_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof cz.proto.HashCluster)) {
      return super.equals(obj);
    }
    cz.proto.HashCluster other = (cz.proto.HashCluster) obj;

    if (getFunctionVersion()
        != other.getFunctionVersion()) return false;
    if (bucketType_ != other.bucketType_) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + FUNCTION_VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getFunctionVersion();
    hash = (37 * hash) + BUCKET_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + bucketType_;
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static cz.proto.HashCluster parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static cz.proto.HashCluster parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static cz.proto.HashCluster parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static cz.proto.HashCluster parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static cz.proto.HashCluster parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static cz.proto.HashCluster parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static cz.proto.HashCluster parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static cz.proto.HashCluster parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static cz.proto.HashCluster parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static cz.proto.HashCluster parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static cz.proto.HashCluster parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static cz.proto.HashCluster parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(cz.proto.HashCluster prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code cz.proto.HashCluster}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:cz.proto.HashCluster)
      cz.proto.HashClusterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return cz.proto.TableCommon.internal_static_cz_proto_HashCluster_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return cz.proto.TableCommon.internal_static_cz_proto_HashCluster_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              cz.proto.HashCluster.class, cz.proto.HashCluster.Builder.class);
    }

    // Construct using cz.proto.HashCluster.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      functionVersion_ = 0;

      bucketType_ = 0;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return cz.proto.TableCommon.internal_static_cz_proto_HashCluster_descriptor;
    }

    @java.lang.Override
    public cz.proto.HashCluster getDefaultInstanceForType() {
      return cz.proto.HashCluster.getDefaultInstance();
    }

    @java.lang.Override
    public cz.proto.HashCluster build() {
      cz.proto.HashCluster result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public cz.proto.HashCluster buildPartial() {
      cz.proto.HashCluster result = new cz.proto.HashCluster(this);
      result.functionVersion_ = functionVersion_;
      result.bucketType_ = bucketType_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof cz.proto.HashCluster) {
        return mergeFrom((cz.proto.HashCluster)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(cz.proto.HashCluster other) {
      if (other == cz.proto.HashCluster.getDefaultInstance()) return this;
      if (other.getFunctionVersion() != 0) {
        setFunctionVersion(other.getFunctionVersion());
      }
      if (other.bucketType_ != 0) {
        setBucketTypeValue(other.getBucketTypeValue());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      cz.proto.HashCluster parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (cz.proto.HashCluster) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int functionVersion_ ;
    /**
     * uint32 function_version = 1;
     * @return The functionVersion.
     */
    @java.lang.Override
    public int getFunctionVersion() {
      return functionVersion_;
    }
    /**
     * uint32 function_version = 1;
     * @param value The functionVersion to set.
     * @return This builder for chaining.
     */
    public Builder setFunctionVersion(int value) {
      
      functionVersion_ = value;
      onChanged();
      return this;
    }
    /**
     * uint32 function_version = 1;
     * @return This builder for chaining.
     */
    public Builder clearFunctionVersion() {
      
      functionVersion_ = 0;
      onChanged();
      return this;
    }

    private int bucketType_ = 0;
    /**
     * .cz.proto.HashBucketType bucket_type = 2;
     * @return The enum numeric value on the wire for bucketType.
     */
    @java.lang.Override public int getBucketTypeValue() {
      return bucketType_;
    }
    /**
     * .cz.proto.HashBucketType bucket_type = 2;
     * @param value The enum numeric value on the wire for bucketType to set.
     * @return This builder for chaining.
     */
    public Builder setBucketTypeValue(int value) {
      
      bucketType_ = value;
      onChanged();
      return this;
    }
    /**
     * .cz.proto.HashBucketType bucket_type = 2;
     * @return The bucketType.
     */
    @java.lang.Override
    public cz.proto.HashBucketType getBucketType() {
      @SuppressWarnings("deprecation")
      cz.proto.HashBucketType result = cz.proto.HashBucketType.valueOf(bucketType_);
      return result == null ? cz.proto.HashBucketType.UNRECOGNIZED : result;
    }
    /**
     * .cz.proto.HashBucketType bucket_type = 2;
     * @param value The bucketType to set.
     * @return This builder for chaining.
     */
    public Builder setBucketType(cz.proto.HashBucketType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      
      bucketType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * .cz.proto.HashBucketType bucket_type = 2;
     * @return This builder for chaining.
     */
    public Builder clearBucketType() {
      
      bucketType_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:cz.proto.HashCluster)
  }

  // @@protoc_insertion_point(class_scope:cz.proto.HashCluster)
  private static final cz.proto.HashCluster DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new cz.proto.HashCluster();
  }

  public static cz.proto.HashCluster getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<HashCluster>
      PARSER = new com.google.protobuf.AbstractParser<HashCluster>() {
    @java.lang.Override
    public HashCluster parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new HashCluster(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<HashCluster> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<HashCluster> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public cz.proto.HashCluster getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
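
A minimal usage sketch (not part of the generated source above), assuming the generated cz.proto classes and protobuf-java are on the classpath:

    // Build a HashCluster, serialize it, and parse it back.
    cz.proto.HashCluster cluster = cz.proto.HashCluster.newBuilder()
        .setFunctionVersion(1)
        .setBucketType(cz.proto.HashBucketType.HASH_MOD)  // HASH_MOD is the enum value the generated code treats as the default
        .build();

    byte[] bytes = cluster.toByteArray();                 // standard protobuf binary encoding
    cz.proto.HashCluster parsed = cz.proto.HashCluster.parseFrom(bytes);
    assert parsed.getFunctionVersion() == 1;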