org.apache.hudi.org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos

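Below is the generated source listing for ProcedureProtos.java. As a quick orientation, here is a minimal usage sketch (not part of the generated file) showing how the generated Procedure message is typically built, serialized, and parsed back. The procedure class name org.example.MyProcedure is hypothetical, and the imports use the package declared in the listing below; in the Hudi-shaded artifact the same classes may instead sit under the relocated org.apache.hudi.org.apache.hadoop.hbase.shaded.protobuf.generated prefix.

import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;

public class ProcedureProtoExample {
  public static void main(String[] args) throws Exception {
    long now = System.currentTimeMillis();
    // class_name, proc_id, submitted_time, state and last_update are `required`
    // fields in hbase.pb.Procedure, so build() throws if any of them is missing.
    Procedure proc = Procedure.newBuilder()
        .setClassName("org.example.MyProcedure")  // hypothetical procedure class
        .setProcId(42L)
        .setSubmittedTime(now)
        .setLastUpdate(now)
        .setState(ProcedureState.RUNNABLE)
        .build();

    byte[] wire = proc.toByteArray();              // standard protobuf serialization
    Procedure parsed = Procedure.parseFrom(wire);  // one of the generated parseFrom overloads
    System.out.println(parsed.getProcId() + " -> " + parsed.getState());
  }
}

The generated file itself follows.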
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: Procedure.proto

package org.apache.hadoop.hbase.shaded.protobuf.generated;

@javax.annotation.Generated("proto")
public final class ProcedureProtos {
  private ProcedureProtos() {}
  public static void registerAllExtensions(
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite) registry);
  }
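  // Usage sketch (not part of the generated file): how the ProcedureState enum
  // declared below maps to and from its numeric wire values. forNumber(int)
  // returns null for an unrecognized number, and the deprecated valueOf(int)
  // overload simply delegates to forNumber(int).
  //
  //   ProcedureState s = ProcedureState.forNumber(2);        // RUNNABLE
  //   int wire = ProcedureState.SUCCESS.getNumber();         // 6
  //   ProcedureState unknown = ProcedureState.forNumber(99); // null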
  /**
   * Protobuf enum {@code hbase.pb.ProcedureState}
   */
  public enum ProcedureState
      implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
    /**
     * 
     * Procedure in construction, not yet added to the executor
     * 
* * INITIALIZING = 1; */ INITIALIZING(1), /** *
     * Procedure added to the executor, and ready to be executed
     * 
* * RUNNABLE = 2; */ RUNNABLE(2), /** *
     * The procedure is waiting on children to be completed
     * 
* * WAITING = 3; */ WAITING(3), /** *
     * The procedure is waiting on a timeout or an external event
     * 
* * WAITING_TIMEOUT = 4; */ WAITING_TIMEOUT(4), /** *
     * The procedure failed and was rolled back
     * 
* * ROLLEDBACK = 5; */ ROLLEDBACK(5), /** *
     * The procedure execution is completed successfully.
     * 
* * SUCCESS = 6; */ SUCCESS(6), /** *
     * The procedure execution failed and may need to be rolled back
     * 
* * FAILED = 7; */ FAILED(7), ; /** *
     * Procedure in construction, not yet added to the executor
     * 
* * INITIALIZING = 1; */ public static final int INITIALIZING_VALUE = 1; /** *
     * Procedure added to the executor, and ready to be executed
     * 
* * RUNNABLE = 2; */ public static final int RUNNABLE_VALUE = 2; /** *
     * The procedure is waiting on children to be completed
     * 
* * WAITING = 3; */ public static final int WAITING_VALUE = 3; /** *
     * The procedure is waiting on a timeout or an external event
     * 
* * WAITING_TIMEOUT = 4; */ public static final int WAITING_TIMEOUT_VALUE = 4; /** *
     * The procedure failed and was rolled back
     * 
* * ROLLEDBACK = 5; */ public static final int ROLLEDBACK_VALUE = 5; /** *
     * The procedure execution is completed successfully.
     * 
* * SUCCESS = 6; */ public static final int SUCCESS_VALUE = 6; /** *
     * The procedure execution failed and may need to be rolled back
     * 
* * FAILED = 7; */ public static final int FAILED_VALUE = 7; public final int getNumber() { return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ProcedureState valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static ProcedureState forNumber(int value) { switch (value) { case 1: return INITIALIZING; case 2: return RUNNABLE; case 3: return WAITING; case 4: return WAITING_TIMEOUT; case 5: return ROLLEDBACK; case 6: return SUCCESS; case 7: return FAILED; default: return null; } } public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap< ProcedureState> internalValueMap = new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap() { public ProcedureState findValueByNumber(int number) { return ProcedureState.forNumber(number); } }; public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor().getEnumTypes().get(0); } private static final ProcedureState[] VALUES = values(); public static ProcedureState valueOf( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private ProcedureState(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.ProcedureState) } public interface ProcedureOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.Procedure) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** *
     * internal "static" state
     * 
* * required string class_name = 1; * @return Whether the className field is set. */ boolean hasClassName(); /** *
     * internal "static" state
     * 
* * required string class_name = 1; * @return The className. */ java.lang.String getClassName(); /** *
     * internal "static" state
     * 
* * required string class_name = 1; * @return The bytes for className. */ org.apache.hbase.thirdparty.com.google.protobuf.ByteString getClassNameBytes(); /** *
     * parent id if not a root-procedure, otherwise not set
     * 
* * optional uint64 parent_id = 2; * @return Whether the parentId field is set. */ boolean hasParentId(); /** *
     * parent id if not a root-procedure, otherwise not set
     * 
* * optional uint64 parent_id = 2; * @return The parentId. */ long getParentId(); /** * required uint64 proc_id = 3; * @return Whether the procId field is set. */ boolean hasProcId(); /** * required uint64 proc_id = 3; * @return The procId. */ long getProcId(); /** * required uint64 submitted_time = 4; * @return Whether the submittedTime field is set. */ boolean hasSubmittedTime(); /** * required uint64 submitted_time = 4; * @return The submittedTime. */ long getSubmittedTime(); /** * optional string owner = 5; * @return Whether the owner field is set. */ boolean hasOwner(); /** * optional string owner = 5; * @return The owner. */ java.lang.String getOwner(); /** * optional string owner = 5; * @return The bytes for owner. */ org.apache.hbase.thirdparty.com.google.protobuf.ByteString getOwnerBytes(); /** *
     * internal "runtime" state
     * 
* * required .hbase.pb.ProcedureState state = 6; * @return Whether the state field is set. */ boolean hasState(); /** *
     * internal "runtime" state
     * 
* * required .hbase.pb.ProcedureState state = 6; * @return The state. */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState getState(); /** *
     * stack indices in case the procedure was running
     * 
* * repeated uint32 stack_id = 7; * @return A list containing the stackId. */ java.util.List getStackIdList(); /** *
     * stack indices in case the procedure was running
     * 
* * repeated uint32 stack_id = 7; * @return The count of stackId. */ int getStackIdCount(); /** *
     * stack indices in case the procedure was running
     * 
* * repeated uint32 stack_id = 7; * @param index The index of the element to return. * @return The stackId at the given index. */ int getStackId(int index); /** * required uint64 last_update = 8; * @return Whether the lastUpdate field is set. */ boolean hasLastUpdate(); /** * required uint64 last_update = 8; * @return The lastUpdate. */ long getLastUpdate(); /** * optional uint32 timeout = 9; * @return Whether the timeout field is set. */ boolean hasTimeout(); /** * optional uint32 timeout = 9; * @return The timeout. */ int getTimeout(); /** *
     * user state/results
     * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; * @return Whether the exception field is set. */ boolean hasException(); /** *
     * user state/results
     * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; * @return The exception. */ org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException(); /** *
     * user state/results
     * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder(); /** *
     * opaque (user) result structure
     * 
* * optional bytes result = 11; * @return Whether the result field is set. */ boolean hasResult(); /** *
     * opaque (user) result structure
     * 
* * optional bytes result = 11; * @return The result. */ org.apache.hbase.thirdparty.com.google.protobuf.ByteString getResult(); /** *
     * opaque (user) procedure internal-state - OBSOLETE
     * 
* * optional bytes state_data = 12; * @return Whether the stateData field is set. */ boolean hasStateData(); /** *
     * opaque (user) procedure internal-state - OBSOLETE
     * 
* * optional bytes state_data = 12; * @return The stateData. */ org.apache.hbase.thirdparty.com.google.protobuf.ByteString getStateData(); /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ java.util.List getStateMessageList(); /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ org.apache.hbase.thirdparty.com.google.protobuf.Any getStateMessage(int index); /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ int getStateMessageCount(); /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ java.util.List getStateMessageOrBuilderList(); /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ org.apache.hbase.thirdparty.com.google.protobuf.AnyOrBuilder getStateMessageOrBuilder( int index); /** *
     * Nonce to prevent the same procedure from being submitted multiple times
     * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return Whether the nonceGroup field is set. */ boolean hasNonceGroup(); /** *
     * Nonce to prevent the same procedure from being submitted multiple times
     * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return The nonceGroup. */ long getNonceGroup(); /** * optional uint64 nonce = 14 [default = 0]; * @return Whether the nonce field is set. */ boolean hasNonce(); /** * optional uint64 nonce = 14 [default = 0]; * @return The nonce. */ long getNonce(); /** *
     * whether the procedure has held the lock
     * 
* * optional bool locked = 16 [default = false]; * @return Whether the locked field is set. */ boolean hasLocked(); /** *
     * whether the procedure has held the lock
     * 
* * optional bool locked = 16 [default = false]; * @return The locked. */ boolean getLocked(); /** *
     * whether the procedure needs to be bypassed
     * 
* * optional bool bypass = 17 [default = false]; * @return Whether the bypass field is set. */ boolean hasBypass(); /** *
     * whether the procedure needs to be bypassed
     * 
* * optional bool bypass = 17 [default = false]; * @return The bypass. */ boolean getBypass(); } /** *
   **
   * Procedure metadata, serialized by the ProcedureStore to be able to recover the old state.
   * 
* * Protobuf type {@code hbase.pb.Procedure} */ @javax.annotation.Generated("proto") public static final class Procedure extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Procedure) ProcedureOrBuilder { private static final long serialVersionUID = 0L; // Use Procedure.newBuilder() to construct. private Procedure(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private Procedure() { className_ = ""; owner_ = ""; state_ = 1; stackId_ = emptyIntList(); result_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY; stateData_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY; stateMessage_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new Procedure(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Procedure( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; className_ = bs; break; } case 16: { bitField0_ |= 0x00000002; parentId_ = input.readUInt64(); break; } case 24: { bitField0_ |= 0x00000004; procId_ = input.readUInt64(); break; } case 32: { bitField0_ |= 0x00000008; submittedTime_ = input.readUInt64(); break; } case 42: { org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; owner_ = bs; break; } case 48: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(6, rawValue); } else { bitField0_ |= 0x00000020; state_ = rawValue; } break; } case 56: { if (!((mutable_bitField0_ & 0x00000040) != 0)) { stackId_ = newIntList(); mutable_bitField0_ |= 0x00000040; } stackId_.addInt(input.readUInt32()); break; } case 58: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000040) != 0) && input.getBytesUntilLimit() > 0) { stackId_ = newIntList(); mutable_bitField0_ |= 0x00000040; } while (input.getBytesUntilLimit() > 0) { stackId_.addInt(input.readUInt32()); } input.popLimit(limit); break; } case 64: { bitField0_ |= 0x00000040; lastUpdate_ = input.readUInt64(); break; } case 72: { bitField0_ |= 0x00000080; timeout_ = input.readUInt32(); break; } case 82: { org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder subBuilder = null; if (((bitField0_ & 0x00000100) != 0)) { subBuilder = 
exception_.toBuilder(); } exception_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(exception_); exception_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000100; break; } case 90: { bitField0_ |= 0x00000200; result_ = input.readBytes(); break; } case 98: { bitField0_ |= 0x00000400; stateData_ = input.readBytes(); break; } case 104: { bitField0_ |= 0x00000800; nonceGroup_ = input.readUInt64(); break; } case 112: { bitField0_ |= 0x00001000; nonce_ = input.readUInt64(); break; } case 122: { if (!((mutable_bitField0_ & 0x00001000) != 0)) { stateMessage_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00001000; } stateMessage_.add( input.readMessage(org.apache.hbase.thirdparty.com.google.protobuf.Any.parser(), extensionRegistry)); break; } case 128: { bitField0_ |= 0x00002000; locked_ = input.readBool(); break; } case 136: { bitField0_ |= 0x00004000; bypass_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000040) != 0)) { stackId_.makeImmutable(); // C } if (((mutable_bitField0_ & 0x00001000) != 0)) { stateMessage_ = java.util.Collections.unmodifiableList(stateMessage_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder.class); } private int bitField0_; public static final int CLASS_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object className_; /** *
     * internal "static" state
     * 
* * required string class_name = 1; * @return Whether the className field is set. */ @java.lang.Override public boolean hasClassName() { return ((bitField0_ & 0x00000001) != 0); } /** *
     * internal "static" state
     * 
* * required string class_name = 1; * @return The className. */ @java.lang.Override public java.lang.String getClassName() { java.lang.Object ref = className_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { className_ = s; } return s; } } /** *
     * internal "static" state
     * 
* * required string class_name = 1; * @return The bytes for className. */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getClassNameBytes() { java.lang.Object ref = className_; if (ref instanceof java.lang.String) { org.apache.hbase.thirdparty.com.google.protobuf.ByteString b = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); className_ = b; return b; } else { return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; } } public static final int PARENT_ID_FIELD_NUMBER = 2; private long parentId_; /** *
     * parent id if not a root-procedure, otherwise not set
     * 
* * optional uint64 parent_id = 2; * @return Whether the parentId field is set. */ @java.lang.Override public boolean hasParentId() { return ((bitField0_ & 0x00000002) != 0); } /** *
     * parent id if not a root-procedure, otherwise not set
     * 
* * optional uint64 parent_id = 2; * @return The parentId. */ @java.lang.Override public long getParentId() { return parentId_; } public static final int PROC_ID_FIELD_NUMBER = 3; private long procId_; /** * required uint64 proc_id = 3; * @return Whether the procId field is set. */ @java.lang.Override public boolean hasProcId() { return ((bitField0_ & 0x00000004) != 0); } /** * required uint64 proc_id = 3; * @return The procId. */ @java.lang.Override public long getProcId() { return procId_; } public static final int SUBMITTED_TIME_FIELD_NUMBER = 4; private long submittedTime_; /** * required uint64 submitted_time = 4; * @return Whether the submittedTime field is set. */ @java.lang.Override public boolean hasSubmittedTime() { return ((bitField0_ & 0x00000008) != 0); } /** * required uint64 submitted_time = 4; * @return The submittedTime. */ @java.lang.Override public long getSubmittedTime() { return submittedTime_; } public static final int OWNER_FIELD_NUMBER = 5; private volatile java.lang.Object owner_; /** * optional string owner = 5; * @return Whether the owner field is set. */ @java.lang.Override public boolean hasOwner() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string owner = 5; * @return The owner. */ @java.lang.Override public java.lang.String getOwner() { java.lang.Object ref = owner_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { owner_ = s; } return s; } } /** * optional string owner = 5; * @return The bytes for owner. */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getOwnerBytes() { java.lang.Object ref = owner_; if (ref instanceof java.lang.String) { org.apache.hbase.thirdparty.com.google.protobuf.ByteString b = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); owner_ = b; return b; } else { return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; } } public static final int STATE_FIELD_NUMBER = 6; private int state_; /** *
     * internal "runtime" state
     * 
* * required .hbase.pb.ProcedureState state = 6; * @return Whether the state field is set. */ @java.lang.Override public boolean hasState() { return ((bitField0_ & 0x00000020) != 0); } /** *
     * internal "runtime" state
     * 
* * required .hbase.pb.ProcedureState state = 6; * @return The state. */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState getState() { @SuppressWarnings("deprecation") org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(state_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING : result; } public static final int STACK_ID_FIELD_NUMBER = 7; private org.apache.hbase.thirdparty.com.google.protobuf.Internal.IntList stackId_; /** *
     * stack indices in case the procedure was running
     * 
* * repeated uint32 stack_id = 7; * @return A list containing the stackId. */ @java.lang.Override public java.util.List getStackIdList() { return stackId_; } /** *
     * stack indices in case the procedure was running
     * 
* * repeated uint32 stack_id = 7; * @return The count of stackId. */ public int getStackIdCount() { return stackId_.size(); } /** *
     * stack indices in case the procedure was running
     * 
* * repeated uint32 stack_id = 7; * @param index The index of the element to return. * @return The stackId at the given index. */ public int getStackId(int index) { return stackId_.getInt(index); } public static final int LAST_UPDATE_FIELD_NUMBER = 8; private long lastUpdate_; /** * required uint64 last_update = 8; * @return Whether the lastUpdate field is set. */ @java.lang.Override public boolean hasLastUpdate() { return ((bitField0_ & 0x00000040) != 0); } /** * required uint64 last_update = 8; * @return The lastUpdate. */ @java.lang.Override public long getLastUpdate() { return lastUpdate_; } public static final int TIMEOUT_FIELD_NUMBER = 9; private int timeout_; /** * optional uint32 timeout = 9; * @return Whether the timeout field is set. */ @java.lang.Override public boolean hasTimeout() { return ((bitField0_ & 0x00000080) != 0); } /** * optional uint32 timeout = 9; * @return The timeout. */ @java.lang.Override public int getTimeout() { return timeout_; } public static final int EXCEPTION_FIELD_NUMBER = 10; private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_; /** *
     * user state/results
     * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; * @return Whether the exception field is set. */ @java.lang.Override public boolean hasException() { return ((bitField0_ & 0x00000100) != 0); } /** *
     * user state/results
     * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; * @return The exception. */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } /** *
     * user state/results
     * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } public static final int RESULT_FIELD_NUMBER = 11; private org.apache.hbase.thirdparty.com.google.protobuf.ByteString result_; /** *
     * opaque (user) result structure
     * 
* * optional bytes result = 11; * @return Whether the result field is set. */ @java.lang.Override public boolean hasResult() { return ((bitField0_ & 0x00000200) != 0); } /** *
     * opaque (user) result structure
     * 
* * optional bytes result = 11; * @return The result. */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getResult() { return result_; } public static final int STATE_DATA_FIELD_NUMBER = 12; private org.apache.hbase.thirdparty.com.google.protobuf.ByteString stateData_; /** *
     * opaque (user) procedure internal-state - OBSOLETE
     * 
* * optional bytes state_data = 12; * @return Whether the stateData field is set. */ @java.lang.Override public boolean hasStateData() { return ((bitField0_ & 0x00000400) != 0); } /** *
     * opaque (user) procedure internal-state - OBSOLETE
     * 
* * optional bytes state_data = 12; * @return The stateData. */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getStateData() { return stateData_; } public static final int STATE_MESSAGE_FIELD_NUMBER = 15; private java.util.List stateMessage_; /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ @java.lang.Override public java.util.List getStateMessageList() { return stateMessage_; } /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ @java.lang.Override public java.util.List getStateMessageOrBuilderList() { return stateMessage_; } /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ @java.lang.Override public int getStateMessageCount() { return stateMessage_.size(); } /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Any getStateMessage(int index) { return stateMessage_.get(index); } /** *
     * opaque (user) procedure internal-state
     * 
* * repeated .google.protobuf.Any state_message = 15; */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.AnyOrBuilder getStateMessageOrBuilder( int index) { return stateMessage_.get(index); } public static final int NONCE_GROUP_FIELD_NUMBER = 13; private long nonceGroup_; /** *
     * Nonce to prevent the same procedure from being submitted multiple times
     * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return Whether the nonceGroup field is set. */ @java.lang.Override public boolean hasNonceGroup() { return ((bitField0_ & 0x00000800) != 0); } /** *
     * Nonce to prevent the same procedure from being submitted multiple times
     * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return The nonceGroup. */ @java.lang.Override public long getNonceGroup() { return nonceGroup_; } public static final int NONCE_FIELD_NUMBER = 14; private long nonce_; /** * optional uint64 nonce = 14 [default = 0]; * @return Whether the nonce field is set. */ @java.lang.Override public boolean hasNonce() { return ((bitField0_ & 0x00001000) != 0); } /** * optional uint64 nonce = 14 [default = 0]; * @return The nonce. */ @java.lang.Override public long getNonce() { return nonce_; } public static final int LOCKED_FIELD_NUMBER = 16; private boolean locked_; /** *
     * whether the procedure has held the lock
     * 
* * optional bool locked = 16 [default = false]; * @return Whether the locked field is set. */ @java.lang.Override public boolean hasLocked() { return ((bitField0_ & 0x00002000) != 0); } /** *
     * whether the procedure has held the lock
     * 
* * optional bool locked = 16 [default = false]; * @return The locked. */ @java.lang.Override public boolean getLocked() { return locked_; } public static final int BYPASS_FIELD_NUMBER = 17; private boolean bypass_; /** *
     * whether the procedure needs to be bypassed
     * 
* * optional bool bypass = 17 [default = false]; * @return Whether the bypass field is set. */ @java.lang.Override public boolean hasBypass() { return ((bitField0_ & 0x00004000) != 0); } /** *
     * whether the procedure needs to be bypassed
     * 
* * optional bool bypass = 17 [default = false]; * @return The bypass. */ @java.lang.Override public boolean getBypass() { return bypass_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasClassName()) { memoizedIsInitialized = 0; return false; } if (!hasProcId()) { memoizedIsInitialized = 0; return false; } if (!hasSubmittedTime()) { memoizedIsInitialized = 0; return false; } if (!hasState()) { memoizedIsInitialized = 0; return false; } if (!hasLastUpdate()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, className_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeUInt64(2, parentId_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeUInt64(3, procId_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeUInt64(4, submittedTime_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, owner_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeEnum(6, state_); } for (int i = 0; i < stackId_.size(); i++) { output.writeUInt32(7, stackId_.getInt(i)); } if (((bitField0_ & 0x00000040) != 0)) { output.writeUInt64(8, lastUpdate_); } if (((bitField0_ & 0x00000080) != 0)) { output.writeUInt32(9, timeout_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeMessage(10, getException()); } if (((bitField0_ & 0x00000200) != 0)) { output.writeBytes(11, result_); } if (((bitField0_ & 0x00000400) != 0)) { output.writeBytes(12, stateData_); } if (((bitField0_ & 0x00000800) != 0)) { output.writeUInt64(13, nonceGroup_); } if (((bitField0_ & 0x00001000) != 0)) { output.writeUInt64(14, nonce_); } for (int i = 0; i < stateMessage_.size(); i++) { output.writeMessage(15, stateMessage_.get(i)); } if (((bitField0_ & 0x00002000) != 0)) { output.writeBool(16, locked_); } if (((bitField0_ & 0x00004000) != 0)) { output.writeBool(17, bypass_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, className_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, parentId_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, procId_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, submittedTime_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, owner_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeEnumSize(6, state_); } { int dataSize = 0; for (int i = 0; i < stackId_.size(); i++) { dataSize += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream 
.computeUInt32SizeNoTag(stackId_.getInt(i)); } size += dataSize; size += 1 * getStackIdList().size(); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(8, lastUpdate_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt32Size(9, timeout_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeMessageSize(10, getException()); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeBytesSize(11, result_); } if (((bitField0_ & 0x00000400) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeBytesSize(12, stateData_); } if (((bitField0_ & 0x00000800) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(13, nonceGroup_); } if (((bitField0_ & 0x00001000) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(14, nonce_); } for (int i = 0; i < stateMessage_.size(); i++) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeMessageSize(15, stateMessage_.get(i)); } if (((bitField0_ & 0x00002000) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeBoolSize(16, locked_); } if (((bitField0_ & 0x00004000) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeBoolSize(17, bypass_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure) obj; if (hasClassName() != other.hasClassName()) return false; if (hasClassName()) { if (!getClassName() .equals(other.getClassName())) return false; } if (hasParentId() != other.hasParentId()) return false; if (hasParentId()) { if (getParentId() != other.getParentId()) return false; } if (hasProcId() != other.hasProcId()) return false; if (hasProcId()) { if (getProcId() != other.getProcId()) return false; } if (hasSubmittedTime() != other.hasSubmittedTime()) return false; if (hasSubmittedTime()) { if (getSubmittedTime() != other.getSubmittedTime()) return false; } if (hasOwner() != other.hasOwner()) return false; if (hasOwner()) { if (!getOwner() .equals(other.getOwner())) return false; } if (hasState() != other.hasState()) return false; if (hasState()) { if (state_ != other.state_) return false; } if (!getStackIdList() .equals(other.getStackIdList())) return false; if (hasLastUpdate() != other.hasLastUpdate()) return false; if (hasLastUpdate()) { if (getLastUpdate() != other.getLastUpdate()) return false; } if (hasTimeout() != other.hasTimeout()) return false; if (hasTimeout()) { if (getTimeout() != other.getTimeout()) return false; } if (hasException() != other.hasException()) return false; if (hasException()) { if (!getException() .equals(other.getException())) return false; } if (hasResult() != other.hasResult()) return false; if (hasResult()) { if (!getResult() .equals(other.getResult())) return false; } if 
(hasStateData() != other.hasStateData()) return false; if (hasStateData()) { if (!getStateData() .equals(other.getStateData())) return false; } if (!getStateMessageList() .equals(other.getStateMessageList())) return false; if (hasNonceGroup() != other.hasNonceGroup()) return false; if (hasNonceGroup()) { if (getNonceGroup() != other.getNonceGroup()) return false; } if (hasNonce() != other.hasNonce()) return false; if (hasNonce()) { if (getNonce() != other.getNonce()) return false; } if (hasLocked() != other.hasLocked()) return false; if (hasLocked()) { if (getLocked() != other.getLocked()) return false; } if (hasBypass() != other.hasBypass()) return false; if (hasBypass()) { if (getBypass() != other.getBypass()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasClassName()) { hash = (37 * hash) + CLASS_NAME_FIELD_NUMBER; hash = (53 * hash) + getClassName().hashCode(); } if (hasParentId()) { hash = (37 * hash) + PARENT_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getParentId()); } if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getProcId()); } if (hasSubmittedTime()) { hash = (37 * hash) + SUBMITTED_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getSubmittedTime()); } if (hasOwner()) { hash = (37 * hash) + OWNER_FIELD_NUMBER; hash = (53 * hash) + getOwner().hashCode(); } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } if (getStackIdCount() > 0) { hash = (37 * hash) + STACK_ID_FIELD_NUMBER; hash = (53 * hash) + getStackIdList().hashCode(); } if (hasLastUpdate()) { hash = (37 * hash) + LAST_UPDATE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getLastUpdate()); } if (hasTimeout()) { hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + getTimeout(); } if (hasException()) { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } if (hasResult()) { hash = (37 * hash) + RESULT_FIELD_NUMBER; hash = (53 * hash) + getResult().hashCode(); } if (hasStateData()) { hash = (37 * hash) + STATE_DATA_FIELD_NUMBER; hash = (53 * hash) + getStateData().hashCode(); } if (getStateMessageCount() > 0) { hash = (37 * hash) + STATE_MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getStateMessageList().hashCode(); } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getNonce()); } if (hasLocked()) { hash = (37 * hash) + LOCKED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean( getLocked()); } if (hasBypass()) { hash = (37 * hash) + BYPASS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean( getBypass()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     **
     * Procedure metadata, serialized by the ProcedureStore to be able to recover the old state.
     * 
* * Protobuf type {@code hbase.pb.Procedure} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.Procedure) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getExceptionFieldBuilder(); getStateMessageFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); className_ = ""; bitField0_ = (bitField0_ & ~0x00000001); parentId_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); procId_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); submittedTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); owner_ = ""; bitField0_ = (bitField0_ & ~0x00000010); state_ = 1; bitField0_ = (bitField0_ & ~0x00000020); stackId_ = emptyIntList(); bitField0_ = (bitField0_ & ~0x00000040); lastUpdate_ = 0L; bitField0_ = (bitField0_ & ~0x00000080); timeout_ = 0; bitField0_ = (bitField0_ & ~0x00000100); if (exceptionBuilder_ == null) { exception_ = null; } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); result_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000400); stateData_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000800); if (stateMessageBuilder_ == null) { stateMessage_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00001000); } else { stateMessageBuilder_.clear(); } nonceGroup_ = 0L; bitField0_ = (bitField0_ & ~0x00002000); nonce_ = 0L; bitField0_ = (bitField0_ & ~0x00004000); locked_ = false; bitField0_ = (bitField0_ & ~0x00008000); bypass_ = false; bitField0_ = (bitField0_ & ~0x00010000); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance(); } @java.lang.Override public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.className_ = className_; if (((from_bitField0_ & 0x00000002) != 0)) { result.parentId_ = parentId_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.procId_ = procId_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.submittedTime_ = submittedTime_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { to_bitField0_ |= 0x00000010; } result.owner_ = owner_; if (((from_bitField0_ & 0x00000020) != 0)) { to_bitField0_ |= 0x00000020; } result.state_ = state_; if (((bitField0_ & 0x00000040) != 0)) { stackId_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000040); } result.stackId_ = stackId_; if (((from_bitField0_ & 0x00000080) != 0)) { result.lastUpdate_ = lastUpdate_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000100) != 0)) { result.timeout_ = timeout_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000200) != 0)) { if (exceptionBuilder_ == null) { result.exception_ = exception_; } else { result.exception_ = exceptionBuilder_.build(); } to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000400) != 0)) { to_bitField0_ |= 0x00000200; } result.result_ = result_; if (((from_bitField0_ & 0x00000800) != 0)) { to_bitField0_ |= 0x00000400; } result.stateData_ = stateData_; if (stateMessageBuilder_ == null) { if (((bitField0_ & 0x00001000) != 0)) { stateMessage_ = java.util.Collections.unmodifiableList(stateMessage_); bitField0_ = (bitField0_ & ~0x00001000); } result.stateMessage_ = stateMessage_; } else { result.stateMessage_ = stateMessageBuilder_.build(); } if (((from_bitField0_ & 0x00002000) != 0)) { result.nonceGroup_ = nonceGroup_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00004000) != 0)) { result.nonce_ = nonce_; to_bitField0_ |= 0x00001000; } if (((from_bitField0_ & 0x00008000) != 0)) { result.locked_ = locked_; to_bitField0_ |= 0x00002000; } if (((from_bitField0_ & 0x00010000) != 0)) { result.bypass_ = bypass_; to_bitField0_ |= 0x00004000; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object 
value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()) return this; if (other.hasClassName()) { bitField0_ |= 0x00000001; className_ = other.className_; onChanged(); } if (other.hasParentId()) { setParentId(other.getParentId()); } if (other.hasProcId()) { setProcId(other.getProcId()); } if (other.hasSubmittedTime()) { setSubmittedTime(other.getSubmittedTime()); } if (other.hasOwner()) { bitField0_ |= 0x00000010; owner_ = other.owner_; onChanged(); } if (other.hasState()) { setState(other.getState()); } if (!other.stackId_.isEmpty()) { if (stackId_.isEmpty()) { stackId_ = other.stackId_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureStackIdIsMutable(); stackId_.addAll(other.stackId_); } onChanged(); } if (other.hasLastUpdate()) { setLastUpdate(other.getLastUpdate()); } if (other.hasTimeout()) { setTimeout(other.getTimeout()); } if (other.hasException()) { mergeException(other.getException()); } if (other.hasResult()) { setResult(other.getResult()); } if (other.hasStateData()) { setStateData(other.getStateData()); } if (stateMessageBuilder_ == null) { if (!other.stateMessage_.isEmpty()) { if (stateMessage_.isEmpty()) { stateMessage_ = other.stateMessage_; bitField0_ = (bitField0_ & ~0x00001000); } else { ensureStateMessageIsMutable(); stateMessage_.addAll(other.stateMessage_); } onChanged(); } } else { if (!other.stateMessage_.isEmpty()) { if (stateMessageBuilder_.isEmpty()) { stateMessageBuilder_.dispose(); stateMessageBuilder_ = null; stateMessage_ = other.stateMessage_; bitField0_ = (bitField0_ & ~0x00001000); stateMessageBuilder_ = org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStateMessageFieldBuilder() : null; } else { stateMessageBuilder_.addAllMessages(other.stateMessage_); } } } if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); } if (other.hasNonce()) { setNonce(other.getNonce()); } if (other.hasLocked()) { setLocked(other.getLocked()); } if (other.hasBypass()) { setBypass(other.getBypass()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasClassName()) { return false; } if (!hasProcId()) { return false; } if (!hasSubmittedTime()) { return false; } if (!hasState()) { return false; } if (!hasLastUpdate()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object className_ = ""; /** *
       * internal "static" state
       * 
* * required string class_name = 1; * @return Whether the className field is set. */ public boolean hasClassName() { return ((bitField0_ & 0x00000001) != 0); } /** *
       * internal "static" state
       * 
* * required string class_name = 1; * @return The className. */ public java.lang.String getClassName() { java.lang.Object ref = className_; if (!(ref instanceof java.lang.String)) { org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { className_ = s; } return s; } else { return (java.lang.String) ref; } } /** *
       * internal "static" state
       * 
* * required string class_name = 1; * @return The bytes for className. */ public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getClassNameBytes() { java.lang.Object ref = className_; if (ref instanceof String) { org.apache.hbase.thirdparty.com.google.protobuf.ByteString b = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); className_ = b; return b; } else { return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; } } /** *
       * internal "static" state
       * 
* * required string class_name = 1; * @param value The className to set. * @return This builder for chaining. */ public Builder setClassName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; className_ = value; onChanged(); return this; } /** *
       * internal "static" state
       * 
* * required string class_name = 1; * @return This builder for chaining. */ public Builder clearClassName() { bitField0_ = (bitField0_ & ~0x00000001); className_ = getDefaultInstance().getClassName(); onChanged(); return this; } /** *
       * internal "static" state
       * 
* * required string class_name = 1; * @param value The bytes for className to set. * @return This builder for chaining. */ public Builder setClassNameBytes( org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; className_ = value; onChanged(); return this; } private long parentId_ ; /** *
        * parent id if this is not a root procedure; otherwise not set
       * 
* * optional uint64 parent_id = 2; * @return Whether the parentId field is set. */ @java.lang.Override public boolean hasParentId() { return ((bitField0_ & 0x00000002) != 0); } /** *
        * parent id if this is not a root procedure; otherwise not set
       * 
* * optional uint64 parent_id = 2; * @return The parentId. */ @java.lang.Override public long getParentId() { return parentId_; } /** *
        * parent id if this is not a root procedure; otherwise not set
       * 
* * optional uint64 parent_id = 2; * @param value The parentId to set. * @return This builder for chaining. */ public Builder setParentId(long value) { bitField0_ |= 0x00000002; parentId_ = value; onChanged(); return this; } /** *
        * parent id if this is not a root procedure; otherwise not set
       * 
* * optional uint64 parent_id = 2; * @return This builder for chaining. */ public Builder clearParentId() { bitField0_ = (bitField0_ & ~0x00000002); parentId_ = 0L; onChanged(); return this; } private long procId_ ; /** * required uint64 proc_id = 3; * @return Whether the procId field is set. */ @java.lang.Override public boolean hasProcId() { return ((bitField0_ & 0x00000004) != 0); } /** * required uint64 proc_id = 3; * @return The procId. */ @java.lang.Override public long getProcId() { return procId_; } /** * required uint64 proc_id = 3; * @param value The procId to set. * @return This builder for chaining. */ public Builder setProcId(long value) { bitField0_ |= 0x00000004; procId_ = value; onChanged(); return this; } /** * required uint64 proc_id = 3; * @return This builder for chaining. */ public Builder clearProcId() { bitField0_ = (bitField0_ & ~0x00000004); procId_ = 0L; onChanged(); return this; } private long submittedTime_ ; /** * required uint64 submitted_time = 4; * @return Whether the submittedTime field is set. */ @java.lang.Override public boolean hasSubmittedTime() { return ((bitField0_ & 0x00000008) != 0); } /** * required uint64 submitted_time = 4; * @return The submittedTime. */ @java.lang.Override public long getSubmittedTime() { return submittedTime_; } /** * required uint64 submitted_time = 4; * @param value The submittedTime to set. * @return This builder for chaining. */ public Builder setSubmittedTime(long value) { bitField0_ |= 0x00000008; submittedTime_ = value; onChanged(); return this; } /** * required uint64 submitted_time = 4; * @return This builder for chaining. */ public Builder clearSubmittedTime() { bitField0_ = (bitField0_ & ~0x00000008); submittedTime_ = 0L; onChanged(); return this; } private java.lang.Object owner_ = ""; /** * optional string owner = 5; * @return Whether the owner field is set. */ public boolean hasOwner() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string owner = 5; * @return The owner. */ public java.lang.String getOwner() { java.lang.Object ref = owner_; if (!(ref instanceof java.lang.String)) { org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { owner_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string owner = 5; * @return The bytes for owner. */ public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getOwnerBytes() { java.lang.Object ref = owner_; if (ref instanceof String) { org.apache.hbase.thirdparty.com.google.protobuf.ByteString b = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); owner_ = b; return b; } else { return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref; } } /** * optional string owner = 5; * @param value The owner to set. * @return This builder for chaining. */ public Builder setOwner( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; owner_ = value; onChanged(); return this; } /** * optional string owner = 5; * @return This builder for chaining. */ public Builder clearOwner() { bitField0_ = (bitField0_ & ~0x00000010); owner_ = getDefaultInstance().getOwner(); onChanged(); return this; } /** * optional string owner = 5; * @param value The bytes for owner to set. * @return This builder for chaining. 
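        * Editorial note (illustrative only): parent_id above is typically left unset for a
        * root procedure and set to the parent's id for a child, e.g.
        *     childBuilder.setParentId(parentProc.getProcId());
        * where childBuilder and parentProc are assumed names, not defined in this file.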
*/ public Builder setOwnerBytes( org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; owner_ = value; onChanged(); return this; } private int state_ = 1; /** *
       * internal "runtime" state
       * 
* * required .hbase.pb.ProcedureState state = 6; * @return Whether the state field is set. */ @java.lang.Override public boolean hasState() { return ((bitField0_ & 0x00000020) != 0); } /** *
       * internal "runtime" state
       * 
* * required .hbase.pb.ProcedureState state = 6; * @return The state. */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState getState() { @SuppressWarnings("deprecation") org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(state_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING : result; } /** *
       * internal "runtime" state
       * 
* * required .hbase.pb.ProcedureState state = 6; * @param value The state to set. * @return This builder for chaining. */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; state_ = value.getNumber(); onChanged(); return this; } /** *
       * internal "runtime" state
       * 
* * required .hbase.pb.ProcedureState state = 6; * @return This builder for chaining. */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000020); state_ = 1; onChanged(); return this; } private org.apache.hbase.thirdparty.com.google.protobuf.Internal.IntList stackId_ = emptyIntList(); private void ensureStackIdIsMutable() { if (!((bitField0_ & 0x00000040) != 0)) { stackId_ = mutableCopy(stackId_); bitField0_ |= 0x00000040; } } /** *
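        * Editorial note (illustrative only): getState() above falls back to INITIALIZING
        * when the stored numeric value does not map to a known ProcedureState, so e.g.
        *     ProcedureState s = builder.getState();
        * never returns null; "builder" is an assumed Procedure.Builder instance.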
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @return A list containing the stackId. */ public java.util.List getStackIdList() { return ((bitField0_ & 0x00000040) != 0) ? java.util.Collections.unmodifiableList(stackId_) : stackId_; } /** *
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @return The count of stackId. */ public int getStackIdCount() { return stackId_.size(); } /** *
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @param index The index of the element to return. * @return The stackId at the given index. */ public int getStackId(int index) { return stackId_.getInt(index); } /** *
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @param index The index to set the value at. * @param value The stackId to set. * @return This builder for chaining. */ public Builder setStackId( int index, int value) { ensureStackIdIsMutable(); stackId_.setInt(index, value); onChanged(); return this; } /** *
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @param value The stackId to add. * @return This builder for chaining. */ public Builder addStackId(int value) { ensureStackIdIsMutable(); stackId_.addInt(value); onChanged(); return this; } /** *
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @param values The stackId to add. * @return This builder for chaining. */ public Builder addAllStackId( java.lang.Iterable values) { ensureStackIdIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, stackId_); onChanged(); return this; } /** *
       * stack indices in case the procedure was running
       * 
* * repeated uint32 stack_id = 7; * @return This builder for chaining. */ public Builder clearStackId() { stackId_ = emptyIntList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); return this; } private long lastUpdate_ ; /** * required uint64 last_update = 8; * @return Whether the lastUpdate field is set. */ @java.lang.Override public boolean hasLastUpdate() { return ((bitField0_ & 0x00000080) != 0); } /** * required uint64 last_update = 8; * @return The lastUpdate. */ @java.lang.Override public long getLastUpdate() { return lastUpdate_; } /** * required uint64 last_update = 8; * @param value The lastUpdate to set. * @return This builder for chaining. */ public Builder setLastUpdate(long value) { bitField0_ |= 0x00000080; lastUpdate_ = value; onChanged(); return this; } /** * required uint64 last_update = 8; * @return This builder for chaining. */ public Builder clearLastUpdate() { bitField0_ = (bitField0_ & ~0x00000080); lastUpdate_ = 0L; onChanged(); return this; } private int timeout_ ; /** * optional uint32 timeout = 9; * @return Whether the timeout field is set. */ @java.lang.Override public boolean hasTimeout() { return ((bitField0_ & 0x00000100) != 0); } /** * optional uint32 timeout = 9; * @return The timeout. */ @java.lang.Override public int getTimeout() { return timeout_; } /** * optional uint32 timeout = 9; * @param value The timeout to set. * @return This builder for chaining. */ public Builder setTimeout(int value) { bitField0_ |= 0x00000100; timeout_ = value; onChanged(); return this; } /** * optional uint32 timeout = 9; * @return This builder for chaining. */ public Builder clearTimeout() { bitField0_ = (bitField0_ & ~0x00000100); timeout_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_; private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> exceptionBuilder_; /** *
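        * Editorial note (illustrative only): the repeated stack_id values above are
        * appended one at a time and read back as a list, e.g.
        *     builder.addStackId(0).addStackId(1);
        *     java.util.List<java.lang.Integer> ids = builder.getStackIdList();
        * where "builder" is an assumed Procedure.Builder instance.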
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; * @return Whether the exception field is set. */ public boolean hasException() { return ((bitField0_ & 0x00000200) != 0); } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; * @return The exception. */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { if (exceptionBuilder_ == null) { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { if (exceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } exception_ = value; onChanged(); } else { exceptionBuilder_.setMessage(value); } bitField0_ |= 0x00000200; return this; } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder builderForValue) { if (exceptionBuilder_ == null) { exception_ = builderForValue.build(); onChanged(); } else { exceptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000200; return this; } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000200) != 0) && exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.newBuilder(exception_).mergeFrom(value).buildPartial(); } else { exception_ = value; } onChanged(); } else { exceptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000200; return this; } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); return this; } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder getExceptionBuilder() { bitField0_ |= 0x00000200; onChanged(); return getExceptionFieldBuilder().getBuilder(); } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } } /** *
       * user state/results
       * 
* * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { exceptionBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder>( getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } private org.apache.hbase.thirdparty.com.google.protobuf.ByteString result_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY; /** *
       * opaque (user) result structure
       * 
* * optional bytes result = 11; * @return Whether the result field is set. */ @java.lang.Override public boolean hasResult() { return ((bitField0_ & 0x00000400) != 0); } /** *
       * opaque (user) result structure
       * 
* * optional bytes result = 11; * @return The result. */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getResult() { return result_; } /** *
       * opaque (user) result structure
       * 
* * optional bytes result = 11; * @param value The result to set. * @return This builder for chaining. */ public Builder setResult(org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000400; result_ = value; onChanged(); return this; } /** *
       * opaque (user) result structure
       * 
* * optional bytes result = 11; * @return This builder for chaining. */ public Builder clearResult() { bitField0_ = (bitField0_ & ~0x00000400); result_ = getDefaultInstance().getResult(); onChanged(); return this; } private org.apache.hbase.thirdparty.com.google.protobuf.ByteString stateData_ = org.apache.hbase.thirdparty.com.google.protobuf.ByteString.EMPTY; /** *
        * opaque (user) procedure internal-state - OBSOLETE
       * 
* * optional bytes state_data = 12; * @return Whether the stateData field is set. */ @java.lang.Override public boolean hasStateData() { return ((bitField0_ & 0x00000800) != 0); } /** *
        * opaque (user) procedure internal-state - OBSOLETE
       * 
* * optional bytes state_data = 12; * @return The stateData. */ @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.ByteString getStateData() { return stateData_; } /** *
        * opaque (user) procedure internal-state - OBSOLETE
       * 
* * optional bytes state_data = 12; * @param value The stateData to set. * @return This builder for chaining. */ public Builder setStateData(org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000800; stateData_ = value; onChanged(); return this; } /** *
        * opaque (user) procedure internal-state - OBSOLETE
       * 
* * optional bytes state_data = 12; * @return This builder for chaining. */ public Builder clearStateData() { bitField0_ = (bitField0_ & ~0x00000800); stateData_ = getDefaultInstance().getStateData(); onChanged(); return this; } private java.util.List stateMessage_ = java.util.Collections.emptyList(); private void ensureStateMessageIsMutable() { if (!((bitField0_ & 0x00001000) != 0)) { stateMessage_ = new java.util.ArrayList(stateMessage_); bitField0_ |= 0x00001000; } } private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hbase.thirdparty.com.google.protobuf.Any, org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder, org.apache.hbase.thirdparty.com.google.protobuf.AnyOrBuilder> stateMessageBuilder_; /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public java.util.List getStateMessageList() { if (stateMessageBuilder_ == null) { return java.util.Collections.unmodifiableList(stateMessage_); } else { return stateMessageBuilder_.getMessageList(); } } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public int getStateMessageCount() { if (stateMessageBuilder_ == null) { return stateMessage_.size(); } else { return stateMessageBuilder_.getCount(); } } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public org.apache.hbase.thirdparty.com.google.protobuf.Any getStateMessage(int index) { if (stateMessageBuilder_ == null) { return stateMessage_.get(index); } else { return stateMessageBuilder_.getMessage(index); } } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder setStateMessage( int index, org.apache.hbase.thirdparty.com.google.protobuf.Any value) { if (stateMessageBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStateMessageIsMutable(); stateMessage_.set(index, value); onChanged(); } else { stateMessageBuilder_.setMessage(index, value); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder setStateMessage( int index, org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder builderForValue) { if (stateMessageBuilder_ == null) { ensureStateMessageIsMutable(); stateMessage_.set(index, builderForValue.build()); onChanged(); } else { stateMessageBuilder_.setMessage(index, builderForValue.build()); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder addStateMessage(org.apache.hbase.thirdparty.com.google.protobuf.Any value) { if (stateMessageBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStateMessageIsMutable(); stateMessage_.add(value); onChanged(); } else { stateMessageBuilder_.addMessage(value); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder addStateMessage( int index, org.apache.hbase.thirdparty.com.google.protobuf.Any value) { if (stateMessageBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStateMessageIsMutable(); stateMessage_.add(index, value); onChanged(); } else { stateMessageBuilder_.addMessage(index, value); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder addStateMessage( org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder builderForValue) { if (stateMessageBuilder_ == null) { ensureStateMessageIsMutable(); stateMessage_.add(builderForValue.build()); onChanged(); } else { stateMessageBuilder_.addMessage(builderForValue.build()); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder addStateMessage( int index, org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder builderForValue) { if (stateMessageBuilder_ == null) { ensureStateMessageIsMutable(); stateMessage_.add(index, builderForValue.build()); onChanged(); } else { stateMessageBuilder_.addMessage(index, builderForValue.build()); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder addAllStateMessage( java.lang.Iterable values) { if (stateMessageBuilder_ == null) { ensureStateMessageIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, stateMessage_); onChanged(); } else { stateMessageBuilder_.addAllMessages(values); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder clearStateMessage() { if (stateMessageBuilder_ == null) { stateMessage_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00001000); onChanged(); } else { stateMessageBuilder_.clear(); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public Builder removeStateMessage(int index) { if (stateMessageBuilder_ == null) { ensureStateMessageIsMutable(); stateMessage_.remove(index); onChanged(); } else { stateMessageBuilder_.remove(index); } return this; } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder getStateMessageBuilder( int index) { return getStateMessageFieldBuilder().getBuilder(index); } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public org.apache.hbase.thirdparty.com.google.protobuf.AnyOrBuilder getStateMessageOrBuilder( int index) { if (stateMessageBuilder_ == null) { return stateMessage_.get(index); } else { return stateMessageBuilder_.getMessageOrBuilder(index); } } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public java.util.List getStateMessageOrBuilderList() { if (stateMessageBuilder_ != null) { return stateMessageBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(stateMessage_); } } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder addStateMessageBuilder() { return getStateMessageFieldBuilder().addBuilder( org.apache.hbase.thirdparty.com.google.protobuf.Any.getDefaultInstance()); } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder addStateMessageBuilder( int index) { return getStateMessageFieldBuilder().addBuilder( index, org.apache.hbase.thirdparty.com.google.protobuf.Any.getDefaultInstance()); } /** *
       * opaque (user) procedure internal-state
       * 
* * repeated .google.protobuf.Any state_message = 15; */ public java.util.List getStateMessageBuilderList() { return getStateMessageFieldBuilder().getBuilderList(); } private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hbase.thirdparty.com.google.protobuf.Any, org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder, org.apache.hbase.thirdparty.com.google.protobuf.AnyOrBuilder> getStateMessageFieldBuilder() { if (stateMessageBuilder_ == null) { stateMessageBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hbase.thirdparty.com.google.protobuf.Any, org.apache.hbase.thirdparty.com.google.protobuf.Any.Builder, org.apache.hbase.thirdparty.com.google.protobuf.AnyOrBuilder>( stateMessage_, ((bitField0_ & 0x00001000) != 0), getParentForChildren(), isClean()); stateMessage_ = null; } return stateMessageBuilder_; } private long nonceGroup_ ; /** *
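        * Editorial note (illustrative only): each state_message entry above is a protobuf
        * Any wrapper, so a caller might add one with
        *     builder.addStateMessage(org.apache.hbase.thirdparty.com.google.protobuf.Any.pack(stateMsg));
        * where "stateMsg" is an assumed protobuf Message instance, not defined here.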
        * Nonce to prevent the same procedure from being submitted multiple times
       * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return Whether the nonceGroup field is set. */ @java.lang.Override public boolean hasNonceGroup() { return ((bitField0_ & 0x00002000) != 0); } /** *
        * Nonce to prevent the same procedure from being submitted multiple times
       * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return The nonceGroup. */ @java.lang.Override public long getNonceGroup() { return nonceGroup_; } /** *
        * Nonce to prevent the same procedure from being submitted multiple times
       * 
* * optional uint64 nonce_group = 13 [default = 0]; * @param value The nonceGroup to set. * @return This builder for chaining. */ public Builder setNonceGroup(long value) { bitField0_ |= 0x00002000; nonceGroup_ = value; onChanged(); return this; } /** *
        * Nonce to prevent the same procedure from being submitted multiple times
       * 
* * optional uint64 nonce_group = 13 [default = 0]; * @return This builder for chaining. */ public Builder clearNonceGroup() { bitField0_ = (bitField0_ & ~0x00002000); nonceGroup_ = 0L; onChanged(); return this; } private long nonce_ ; /** * optional uint64 nonce = 14 [default = 0]; * @return Whether the nonce field is set. */ @java.lang.Override public boolean hasNonce() { return ((bitField0_ & 0x00004000) != 0); } /** * optional uint64 nonce = 14 [default = 0]; * @return The nonce. */ @java.lang.Override public long getNonce() { return nonce_; } /** * optional uint64 nonce = 14 [default = 0]; * @param value The nonce to set. * @return This builder for chaining. */ public Builder setNonce(long value) { bitField0_ |= 0x00004000; nonce_ = value; onChanged(); return this; } /** * optional uint64 nonce = 14 [default = 0]; * @return This builder for chaining. */ public Builder clearNonce() { bitField0_ = (bitField0_ & ~0x00004000); nonce_ = 0L; onChanged(); return this; } private boolean locked_ ; /** *
       * whether the procedure has held the lock
       * 
* * optional bool locked = 16 [default = false]; * @return Whether the locked field is set. */ @java.lang.Override public boolean hasLocked() { return ((bitField0_ & 0x00008000) != 0); } /** *
       * whether the procedure has held the lock
       * 
* * optional bool locked = 16 [default = false]; * @return The locked. */ @java.lang.Override public boolean getLocked() { return locked_; } /** *
       * whether the procedure has held the lock
       * 
* * optional bool locked = 16 [default = false]; * @param value The locked to set. * @return This builder for chaining. */ public Builder setLocked(boolean value) { bitField0_ |= 0x00008000; locked_ = value; onChanged(); return this; } /** *
       * whether the procedure has held the lock
       * 
* * optional bool locked = 16 [default = false]; * @return This builder for chaining. */ public Builder clearLocked() { bitField0_ = (bitField0_ & ~0x00008000); locked_ = false; onChanged(); return this; } private boolean bypass_ ; /** *
        * whether the procedure needs to be bypassed
       * 
* * optional bool bypass = 17 [default = false]; * @return Whether the bypass field is set. */ @java.lang.Override public boolean hasBypass() { return ((bitField0_ & 0x00010000) != 0); } /** *
        * whether the procedure needs to be bypassed
       * 
* * optional bool bypass = 17 [default = false]; * @return The bypass. */ @java.lang.Override public boolean getBypass() { return bypass_; } /** *
        * whether the procedure needs to be bypassed
       * 
* * optional bool bypass = 17 [default = false]; * @param value The bypass to set. * @return This builder for chaining. */ public Builder setBypass(boolean value) { bitField0_ |= 0x00010000; bypass_ = value; onChanged(); return this; } /** *
        * whether the procedure needs to be bypassed
       * 
* * optional bool bypass = 17 [default = false]; * @return This builder for chaining. */ public Builder clearBypass() { bitField0_ = (bitField0_ & ~0x00010000); bypass_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Procedure) } // @@protoc_insertion_point(class_scope:hbase.pb.Procedure) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public Procedure parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new Procedure(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SequentialProcedureDataOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.SequentialProcedureData) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * required bool executed = 1; * @return Whether the executed field is set. */ boolean hasExecuted(); /** * required bool executed = 1; * @return The executed. */ boolean getExecuted(); } /** *
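    * Editorial note (illustrative only): a serialized hbase.pb.Procedure from the class
    * above can be rebuilt from its wire form, e.g.
    *     Procedure proc = Procedure.parseFrom(bytes);
    * where "bytes" is an assumed byte[] previously produced by proc.toByteArray().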
   **
   * SequentialProcedure data
   * 
* * Protobuf type {@code hbase.pb.SequentialProcedureData} */ @javax.annotation.Generated("proto") public static final class SequentialProcedureData extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.SequentialProcedureData) SequentialProcedureDataOrBuilder { private static final long serialVersionUID = 0L; // Use SequentialProcedureData.newBuilder() to construct. private SequentialProcedureData(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SequentialProcedureData() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new SequentialProcedureData(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SequentialProcedureData( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; executed_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.Builder.class); } private int bitField0_; public static final int EXECUTED_FIELD_NUMBER = 1; private boolean executed_; /** * required bool executed = 1; * @return Whether the executed field is set. */ @java.lang.Override public boolean hasExecuted() { return ((bitField0_ & 0x00000001) != 0); } /** * required bool executed = 1; * @return The executed. 
*/ @java.lang.Override public boolean getExecuted() { return executed_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasExecuted()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeBool(1, executed_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeBoolSize(1, executed_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData) obj; if (hasExecuted() != other.hasExecuted()) return false; if (hasExecuted()) { if (getExecuted() != other.getExecuted()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasExecuted()) { hash = (37 * hash) + EXECUTED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean( getExecuted()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom(byte[] data) throws 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     **
     * SequentialProcedure data
     * 
* * Protobuf type {@code hbase.pb.SequentialProcedureData} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.SequentialProcedureData) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureDataOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); executed_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.executed_ = executed_; to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( 
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.getDefaultInstance()) return this; if (other.hasExecuted()) { setExecuted(other.getExecuted()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasExecuted()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean executed_ ; /** * required bool executed = 1; * @return Whether the executed field is set. */ @java.lang.Override public boolean hasExecuted() { return ((bitField0_ & 0x00000001) != 0); } /** * required bool executed = 1; * @return The executed. */ @java.lang.Override public boolean getExecuted() { return executed_; } /** * required bool executed = 1; * @param value The executed to set. * @return This builder for chaining. */ public Builder setExecuted(boolean value) { bitField0_ |= 0x00000001; executed_ = value; onChanged(); return this; } /** * required bool executed = 1; * @return This builder for chaining. 
*/ public Builder clearExecuted() { bitField0_ = (bitField0_ & ~0x00000001); executed_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.SequentialProcedureData) } // @@protoc_insertion_point(class_scope:hbase.pb.SequentialProcedureData) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public SequentialProcedureData parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new SequentialProcedureData(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StateMachineProcedureDataOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.StateMachineProcedureData) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * repeated uint32 state = 1; * @return A list containing the state. */ java.util.List getStateList(); /** * repeated uint32 state = 1; * @return The count of state. */ int getStateCount(); /** * repeated uint32 state = 1; * @param index The index of the element to return. * @return The state at the given index. */ int getState(int index); } /** *
   **
   * StateMachineProcedure data
   * 
* * Protobuf type {@code hbase.pb.StateMachineProcedureData} */ @javax.annotation.Generated("proto") public static final class StateMachineProcedureData extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.StateMachineProcedureData) StateMachineProcedureDataOrBuilder { private static final long serialVersionUID = 0L; // Use StateMachineProcedureData.newBuilder() to construct. private StateMachineProcedureData(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StateMachineProcedureData() { state_ = emptyIntList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StateMachineProcedureData(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StateMachineProcedureData( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { state_ = newIntList(); mutable_bitField0_ |= 0x00000001; } state_.addInt(input.readUInt32()); break; } case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) { state_ = newIntList(); mutable_bitField0_ |= 0x00000001; } while (input.getBytesUntilLimit() > 0) { state_.addInt(input.readUInt32()); } input.popLimit(limit); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { state_.makeImmutable(); // C } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.Builder.class); } public static final int STATE_FIELD_NUMBER = 1; 
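    // Editorial note (illustrative only, not part of the generated file): the repeated
    // "state" field below is read back as a List of Integer state ordinals, e.g.
    //     java.util.List<java.lang.Integer> states = data.getStateList();
    // where "data" is an assumed StateMachineProcedureData instance.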
private org.apache.hbase.thirdparty.com.google.protobuf.Internal.IntList state_; /** * repeated uint32 state = 1; * @return A list containing the state. */ @java.lang.Override public java.util.List getStateList() { return state_; } /** * repeated uint32 state = 1; * @return The count of state. */ public int getStateCount() { return state_.size(); } /** * repeated uint32 state = 1; * @param index The index of the element to return. * @return The state at the given index. */ public int getState(int index) { return state_.getInt(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < state_.size(); i++) { output.writeUInt32(1, state_.getInt(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < state_.size(); i++) { dataSize += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt32SizeNoTag(state_.getInt(i)); } size += dataSize; size += 1 * getStateList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData) obj; if (!getStateList() .equals(other.getStateList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getStateCount() > 0) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + getStateList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * <pre>
     * StateMachineProcedure data
     * </pre>
* * Protobuf type {@code hbase.pb.StateMachineProcedureData} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.StateMachineProcedureData) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureDataOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); state_ = emptyIntList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { state_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000001); } result.state_ = state_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( 
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.getDefaultInstance()) return this; if (!other.state_.isEmpty()) { if (state_.isEmpty()) { state_ = other.state_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureStateIsMutable(); state_.addAll(other.state_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hbase.thirdparty.com.google.protobuf.Internal.IntList state_ = emptyIntList(); private void ensureStateIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { state_ = mutableCopy(state_); bitField0_ |= 0x00000001; } } /** * repeated uint32 state = 1; * @return A list containing the state. */ public java.util.List getStateList() { return ((bitField0_ & 0x00000001) != 0) ? java.util.Collections.unmodifiableList(state_) : state_; } /** * repeated uint32 state = 1; * @return The count of state. */ public int getStateCount() { return state_.size(); } /** * repeated uint32 state = 1; * @param index The index of the element to return. * @return The state at the given index. */ public int getState(int index) { return state_.getInt(index); } /** * repeated uint32 state = 1; * @param index The index to set the value at. * @param value The state to set. * @return This builder for chaining. 
*/ public Builder setState( int index, int value) { ensureStateIsMutable(); state_.setInt(index, value); onChanged(); return this; } /** * repeated uint32 state = 1; * @param value The state to add. * @return This builder for chaining. */ public Builder addState(int value) { ensureStateIsMutable(); state_.addInt(value); onChanged(); return this; } /** * repeated uint32 state = 1; * @param values The state to add. * @return This builder for chaining. */ public Builder addAllState( java.lang.Iterable values) { ensureStateIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, state_); onChanged(); return this; } /** * repeated uint32 state = 1; * @return This builder for chaining. */ public Builder clearState() { state_ = emptyIntList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.StateMachineProcedureData) } // @@protoc_insertion_point(class_scope:hbase.pb.StateMachineProcedureData) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public StateMachineProcedureData parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new StateMachineProcedureData(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ProcedureWALHeaderOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureWALHeader) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * required uint32 version = 1; * @return Whether the version field is set. */ boolean hasVersion(); /** * required uint32 version = 1; * @return The version. */ int getVersion(); /** * required uint32 type = 2; * @return Whether the type field is set. */ boolean hasType(); /** * required uint32 type = 2; * @return The type. */ int getType(); /** * required uint64 log_id = 3; * @return Whether the logId field is set. */ boolean hasLogId(); /** * required uint64 log_id = 3; * @return The logId. 
*/ long getLogId(); /** * required uint64 min_proc_id = 4; * @return Whether the minProcId field is set. */ boolean hasMinProcId(); /** * required uint64 min_proc_id = 4; * @return The minProcId. */ long getMinProcId(); } /** *
   * <pre>
   * Procedure WAL header
   * </pre>
* * Protobuf type {@code hbase.pb.ProcedureWALHeader} */ @javax.annotation.Generated("proto") public static final class ProcedureWALHeader extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureWALHeader) ProcedureWALHeaderOrBuilder { private static final long serialVersionUID = 0L; // Use ProcedureWALHeader.newBuilder() to construct. private ProcedureWALHeader(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ProcedureWALHeader() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ProcedureWALHeader(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ProcedureWALHeader( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; version_ = input.readUInt32(); break; } case 16: { bitField0_ |= 0x00000002; type_ = input.readUInt32(); break; } case 24: { bitField0_ |= 0x00000004; logId_ = input.readUInt64(); break; } case 32: { bitField0_ |= 0x00000008; minProcId_ = input.readUInt64(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.Builder.class); } private int bitField0_; public static final int VERSION_FIELD_NUMBER = 1; private int version_; /** * required uint32 version = 1; * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * required uint32 version = 1; * @return The version. 
*/ @java.lang.Override public int getVersion() { return version_; } public static final int TYPE_FIELD_NUMBER = 2; private int type_; /** * required uint32 type = 2; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000002) != 0); } /** * required uint32 type = 2; * @return The type. */ @java.lang.Override public int getType() { return type_; } public static final int LOG_ID_FIELD_NUMBER = 3; private long logId_; /** * required uint64 log_id = 3; * @return Whether the logId field is set. */ @java.lang.Override public boolean hasLogId() { return ((bitField0_ & 0x00000004) != 0); } /** * required uint64 log_id = 3; * @return The logId. */ @java.lang.Override public long getLogId() { return logId_; } public static final int MIN_PROC_ID_FIELD_NUMBER = 4; private long minProcId_; /** * required uint64 min_proc_id = 4; * @return Whether the minProcId field is set. */ @java.lang.Override public boolean hasMinProcId() { return ((bitField0_ & 0x00000008) != 0); } /** * required uint64 min_proc_id = 4; * @return The minProcId. */ @java.lang.Override public long getMinProcId() { return minProcId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; return false; } if (!hasType()) { memoizedIsInitialized = 0; return false; } if (!hasLogId()) { memoizedIsInitialized = 0; return false; } if (!hasMinProcId()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeUInt32(1, version_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeUInt32(2, type_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeUInt64(3, logId_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeUInt64(4, minProcId_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt32Size(1, version_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt32Size(2, type_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, logId_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, minProcId_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader) obj; if (hasVersion() != other.hasVersion()) return false; if (hasVersion()) { if (getVersion() != other.getVersion()) return false; } if (hasType() != other.hasType()) return 
false; if (hasType()) { if (getType() != other.getType()) return false; } if (hasLogId() != other.hasLogId()) return false; if (hasLogId()) { if (getLogId() != other.getLogId()) return false; } if (hasMinProcId() != other.hasMinProcId()) return false; if (hasMinProcId()) { if (getMinProcId() != other.getMinProcId()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion(); } if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + getType(); } if (hasLogId()) { hash = (37 * hash) + LOG_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getLogId()); } if (hasMinProcId()) { hash = (37 * hash) + MIN_PROC_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getMinProcId()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( java.io.InputStream input, 
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * <pre>
     * Procedure WAL header
     * </pre>
* * Protobuf type {@code hbase.pb.ProcedureWALHeader} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureWALHeader) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeaderOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); version_ = 0; bitField0_ = (bitField0_ & ~0x00000001); type_ = 0; bitField0_ = (bitField0_ & ~0x00000002); logId_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); minProcId_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.version_ = version_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.type_ = type_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.logId_ = logId_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.minProcId_ = minProcId_; to_bitField0_ |= 0x00000008; 
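          // Editor's note: each required field above is copied into `result`
          // only when its presence bit is set in the builder's bitField0_;
          // the accumulated to_bitField0_ is assigned to result.bitField0_
          // just below, which is what backs the generated has*() accessors.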
} result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.getDefaultInstance()) return this; if (other.hasVersion()) { setVersion(other.getVersion()); } if (other.hasType()) { setType(other.getType()); } if (other.hasLogId()) { setLogId(other.getLogId()); } if (other.hasMinProcId()) { setMinProcId(other.getMinProcId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasVersion()) { return false; } if (!hasType()) { return false; } if (!hasLogId()) { return false; } if (!hasMinProcId()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int version_ ; /** * required uint32 version = 1; * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * required uint32 version = 1; * @return The version. */ @java.lang.Override public int getVersion() { return version_; } /** * required uint32 version = 1; * @param value The version to set. * @return This builder for chaining. 
*/ public Builder setVersion(int value) { bitField0_ |= 0x00000001; version_ = value; onChanged(); return this; } /** * required uint32 version = 1; * @return This builder for chaining. */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000001); version_ = 0; onChanged(); return this; } private int type_ ; /** * required uint32 type = 2; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000002) != 0); } /** * required uint32 type = 2; * @return The type. */ @java.lang.Override public int getType() { return type_; } /** * required uint32 type = 2; * @param value The type to set. * @return This builder for chaining. */ public Builder setType(int value) { bitField0_ |= 0x00000002; type_ = value; onChanged(); return this; } /** * required uint32 type = 2; * @return This builder for chaining. */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000002); type_ = 0; onChanged(); return this; } private long logId_ ; /** * required uint64 log_id = 3; * @return Whether the logId field is set. */ @java.lang.Override public boolean hasLogId() { return ((bitField0_ & 0x00000004) != 0); } /** * required uint64 log_id = 3; * @return The logId. */ @java.lang.Override public long getLogId() { return logId_; } /** * required uint64 log_id = 3; * @param value The logId to set. * @return This builder for chaining. */ public Builder setLogId(long value) { bitField0_ |= 0x00000004; logId_ = value; onChanged(); return this; } /** * required uint64 log_id = 3; * @return This builder for chaining. */ public Builder clearLogId() { bitField0_ = (bitField0_ & ~0x00000004); logId_ = 0L; onChanged(); return this; } private long minProcId_ ; /** * required uint64 min_proc_id = 4; * @return Whether the minProcId field is set. */ @java.lang.Override public boolean hasMinProcId() { return ((bitField0_ & 0x00000008) != 0); } /** * required uint64 min_proc_id = 4; * @return The minProcId. */ @java.lang.Override public long getMinProcId() { return minProcId_; } /** * required uint64 min_proc_id = 4; * @param value The minProcId to set. * @return This builder for chaining. */ public Builder setMinProcId(long value) { bitField0_ |= 0x00000008; minProcId_ = value; onChanged(); return this; } /** * required uint64 min_proc_id = 4; * @return This builder for chaining. 
*/ public Builder clearMinProcId() { bitField0_ = (bitField0_ & ~0x00000008); minProcId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureWALHeader) } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALHeader) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public ProcedureWALHeader parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new ProcedureWALHeader(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ProcedureWALTrailerOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureWALTrailer) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * required uint32 version = 1; * @return Whether the version field is set. */ boolean hasVersion(); /** * required uint32 version = 1; * @return The version. */ int getVersion(); /** * required uint64 tracker_pos = 2; * @return Whether the trackerPos field is set. */ boolean hasTrackerPos(); /** * required uint64 tracker_pos = 2; * @return The trackerPos. */ long getTrackerPos(); } /** *
   * <pre>
   * Procedure WAL trailer
   * </pre>
* * Protobuf type {@code hbase.pb.ProcedureWALTrailer} */ @javax.annotation.Generated("proto") public static final class ProcedureWALTrailer extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureWALTrailer) ProcedureWALTrailerOrBuilder { private static final long serialVersionUID = 0L; // Use ProcedureWALTrailer.newBuilder() to construct. private ProcedureWALTrailer(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ProcedureWALTrailer() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ProcedureWALTrailer(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ProcedureWALTrailer( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; version_ = input.readUInt32(); break; } case 16: { bitField0_ |= 0x00000002; trackerPos_ = input.readUInt64(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.Builder.class); } private int bitField0_; public static final int VERSION_FIELD_NUMBER = 1; private int version_; /** * required uint32 version = 1; * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * required uint32 version = 1; * @return The version. */ @java.lang.Override public int getVersion() { return version_; } public static final int TRACKER_POS_FIELD_NUMBER = 2; private long trackerPos_; /** * required uint64 tracker_pos = 2; * @return Whether the trackerPos field is set. 
*/ @java.lang.Override public boolean hasTrackerPos() { return ((bitField0_ & 0x00000002) != 0); } /** * required uint64 tracker_pos = 2; * @return The trackerPos. */ @java.lang.Override public long getTrackerPos() { return trackerPos_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; return false; } if (!hasTrackerPos()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeUInt32(1, version_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeUInt64(2, trackerPos_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt32Size(1, version_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, trackerPos_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer) obj; if (hasVersion() != other.hasVersion()) return false; if (hasVersion()) { if (getVersion() != other.getVersion()) return false; } if (hasTrackerPos() != other.hasTrackerPos()) return false; if (hasTrackerPos()) { if (getTrackerPos() != other.getTrackerPos()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion(); } if (hasTrackerPos()) { hash = (37 * hash) + TRACKER_POS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getTrackerPos()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * <pre>
     * Procedure WAL trailer
     * </pre>
* * Protobuf type {@code hbase.pb.ProcedureWALTrailer} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureWALTrailer) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailerOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); version_ = 0; bitField0_ = (bitField0_ & ~0x00000001); trackerPos_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.version_ = version_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.trackerPos_ = trackerPos_; to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) 
{ return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.getDefaultInstance()) return this; if (other.hasVersion()) { setVersion(other.getVersion()); } if (other.hasTrackerPos()) { setTrackerPos(other.getTrackerPos()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasVersion()) { return false; } if (!hasTrackerPos()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int version_ ; /** * required uint32 version = 1; * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * required uint32 version = 1; * @return The version. */ @java.lang.Override public int getVersion() { return version_; } /** * required uint32 version = 1; * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(int value) { bitField0_ |= 0x00000001; version_ = value; onChanged(); return this; } /** * required uint32 version = 1; * @return This builder for chaining. */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000001); version_ = 0; onChanged(); return this; } private long trackerPos_ ; /** * required uint64 tracker_pos = 2; * @return Whether the trackerPos field is set. 
*/ @java.lang.Override public boolean hasTrackerPos() { return ((bitField0_ & 0x00000002) != 0); } /** * required uint64 tracker_pos = 2; * @return The trackerPos. */ @java.lang.Override public long getTrackerPos() { return trackerPos_; } /** * required uint64 tracker_pos = 2; * @param value The trackerPos to set. * @return This builder for chaining. */ public Builder setTrackerPos(long value) { bitField0_ |= 0x00000002; trackerPos_ = value; onChanged(); return this; } /** * required uint64 tracker_pos = 2; * @return This builder for chaining. */ public Builder clearTrackerPos() { bitField0_ = (bitField0_ & ~0x00000002); trackerPos_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureWALTrailer) } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALTrailer) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public ProcedureWALTrailer parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new ProcedureWALTrailer(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ProcedureStoreTrackerOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureStoreTracker) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ java.util.List getNodeList(); /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getNode(int index); /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ int getNodeCount(); /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ java.util.List getNodeOrBuilderList(); /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder getNodeOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.ProcedureStoreTracker} */ 
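/*
 * Editor's note (not part of the generated source): a minimal usage sketch for the
 * ProcedureStoreTracker message declared below, assuming only the builder and parse
 * methods that protoc emits for it in this file (newBuilder, addNode, build, parseFrom).
 *
 *   ProcedureProtos.ProcedureStoreTracker tracker =
 *       ProcedureProtos.ProcedureStoreTracker.newBuilder()
 *           .addNode(ProcedureProtos.ProcedureStoreTracker.TrackerNode.newBuilder()
 *               .setStartId(100L)      // required field; build() throws without it
 *               .addUpdated(101L)
 *               .addDeleted(102L))
 *           .build();
 *   byte[] bytes = tracker.toByteArray();
 *   ProcedureProtos.ProcedureStoreTracker roundTripped =
 *       ProcedureProtos.ProcedureStoreTracker.parseFrom(bytes);
 */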
@javax.annotation.Generated("proto") public static final class ProcedureStoreTracker extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureStoreTracker) ProcedureStoreTrackerOrBuilder { private static final long serialVersionUID = 0L; // Use ProcedureStoreTracker.newBuilder() to construct. private ProcedureStoreTracker(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ProcedureStoreTracker() { node_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ProcedureStoreTracker(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ProcedureStoreTracker( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { node_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } node_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { node_ = java.util.Collections.unmodifiableList(node_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.Builder.class); } public interface TrackerNodeOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureStoreTracker.TrackerNode) org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * required uint64 start_id = 1; * @return Whether the startId field is set. 
*/ boolean hasStartId(); /** * required uint64 start_id = 1; * @return The startId. */ long getStartId(); /** * repeated uint64 updated = 2; * @return A list containing the updated. */ java.util.List getUpdatedList(); /** * repeated uint64 updated = 2; * @return The count of updated. */ int getUpdatedCount(); /** * repeated uint64 updated = 2; * @param index The index of the element to return. * @return The updated at the given index. */ long getUpdated(int index); /** * repeated uint64 deleted = 3; * @return A list containing the deleted. */ java.util.List getDeletedList(); /** * repeated uint64 deleted = 3; * @return The count of deleted. */ int getDeletedCount(); /** * repeated uint64 deleted = 3; * @param index The index of the element to return. * @return The deleted at the given index. */ long getDeleted(int index); } /** * Protobuf type {@code hbase.pb.ProcedureStoreTracker.TrackerNode} */ @javax.annotation.Generated("proto") public static final class TrackerNode extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureStoreTracker.TrackerNode) TrackerNodeOrBuilder { private static final long serialVersionUID = 0L; // Use TrackerNode.newBuilder() to construct. private TrackerNode(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private TrackerNode() { updated_ = emptyLongList(); deleted_ = emptyLongList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new TrackerNode(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TrackerNode( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; startId_ = input.readUInt64(); break; } case 16: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { updated_ = newLongList(); mutable_bitField0_ |= 0x00000002; } updated_.addLong(input.readUInt64()); break; } case 18: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000002) != 0) && input.getBytesUntilLimit() > 0) { updated_ = newLongList(); mutable_bitField0_ |= 0x00000002; } while (input.getBytesUntilLimit() > 0) { updated_.addLong(input.readUInt64()); } input.popLimit(limit); break; } case 24: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { deleted_ = newLongList(); mutable_bitField0_ |= 0x00000004; } deleted_.addLong(input.readUInt64()); break; } case 26: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000004) != 0) && input.getBytesUntilLimit() > 0) { deleted_ = newLongList(); mutable_bitField0_ |= 0x00000004; } while (input.getBytesUntilLimit() > 0) { deleted_.addLong(input.readUInt64()); } input.popLimit(limit); break; } 
default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { updated_.makeImmutable(); // C } if (((mutable_bitField0_ & 0x00000004) != 0)) { deleted_.makeImmutable(); // C } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder.class); } private int bitField0_; public static final int START_ID_FIELD_NUMBER = 1; private long startId_; /** * required uint64 start_id = 1; * @return Whether the startId field is set. */ @java.lang.Override public boolean hasStartId() { return ((bitField0_ & 0x00000001) != 0); } /** * required uint64 start_id = 1; * @return The startId. */ @java.lang.Override public long getStartId() { return startId_; } public static final int UPDATED_FIELD_NUMBER = 2; private org.apache.hbase.thirdparty.com.google.protobuf.Internal.LongList updated_; /** * repeated uint64 updated = 2; * @return A list containing the updated. */ @java.lang.Override public java.util.List getUpdatedList() { return updated_; } /** * repeated uint64 updated = 2; * @return The count of updated. */ public int getUpdatedCount() { return updated_.size(); } /** * repeated uint64 updated = 2; * @param index The index of the element to return. * @return The updated at the given index. */ public long getUpdated(int index) { return updated_.getLong(index); } public static final int DELETED_FIELD_NUMBER = 3; private org.apache.hbase.thirdparty.com.google.protobuf.Internal.LongList deleted_; /** * repeated uint64 deleted = 3; * @return A list containing the deleted. */ @java.lang.Override public java.util.List getDeletedList() { return deleted_; } /** * repeated uint64 deleted = 3; * @return The count of deleted. */ public int getDeletedCount() { return deleted_.size(); } /** * repeated uint64 deleted = 3; * @param index The index of the element to return. * @return The deleted at the given index. 
*/ public long getDeleted(int index) { return deleted_.getLong(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasStartId()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeUInt64(1, startId_); } for (int i = 0; i < updated_.size(); i++) { output.writeUInt64(2, updated_.getLong(i)); } for (int i = 0; i < deleted_.size(); i++) { output.writeUInt64(3, deleted_.getLong(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(1, startId_); } { int dataSize = 0; for (int i = 0; i < updated_.size(); i++) { dataSize += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64SizeNoTag(updated_.getLong(i)); } size += dataSize; size += 1 * getUpdatedList().size(); } { int dataSize = 0; for (int i = 0; i < deleted_.size(); i++) { dataSize += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64SizeNoTag(deleted_.getLong(i)); } size += dataSize; size += 1 * getDeletedList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode) obj; if (hasStartId() != other.hasStartId()) return false; if (hasStartId()) { if (getStartId() != other.getStartId()) return false; } if (!getUpdatedList() .equals(other.getUpdatedList())) return false; if (!getDeletedList() .equals(other.getDeletedList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStartId()) { hash = (37 * hash) + START_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getStartId()); } if (getUpdatedCount() > 0) { hash = (37 * hash) + UPDATED_FIELD_NUMBER; hash = (53 * hash) + getUpdatedList().hashCode(); } if (getDeletedCount() > 0) { hash = (37 * hash) + DELETED_FIELD_NUMBER; hash = (53 * hash) + getDeletedList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( 
java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( 
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ProcedureStoreTracker.TrackerNode} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureStoreTracker.TrackerNode) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); startId_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); updated_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000002); deleted_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.startId_ = startId_; to_bitField0_ |= 0x00000001; } if (((bitField0_ & 0x00000002) != 0)) { updated_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000002); } result.updated_ = updated_; if (((bitField0_ & 0x00000004) != 0)) { deleted_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000004); } result.deleted_ = deleted_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.getDefaultInstance()) return this; if (other.hasStartId()) { setStartId(other.getStartId()); } if (!other.updated_.isEmpty()) { if (updated_.isEmpty()) { updated_ = other.updated_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureUpdatedIsMutable(); updated_.addAll(other.updated_); } onChanged(); } if (!other.deleted_.isEmpty()) { if (deleted_.isEmpty()) { deleted_ = other.deleted_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureDeletedIsMutable(); deleted_.addAll(other.deleted_); } onChanged(); } 
this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasStartId()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private long startId_ ; /** * required uint64 start_id = 1; * @return Whether the startId field is set. */ @java.lang.Override public boolean hasStartId() { return ((bitField0_ & 0x00000001) != 0); } /** * required uint64 start_id = 1; * @return The startId. */ @java.lang.Override public long getStartId() { return startId_; } /** * required uint64 start_id = 1; * @param value The startId to set. * @return This builder for chaining. */ public Builder setStartId(long value) { bitField0_ |= 0x00000001; startId_ = value; onChanged(); return this; } /** * required uint64 start_id = 1; * @return This builder for chaining. */ public Builder clearStartId() { bitField0_ = (bitField0_ & ~0x00000001); startId_ = 0L; onChanged(); return this; } private org.apache.hbase.thirdparty.com.google.protobuf.Internal.LongList updated_ = emptyLongList(); private void ensureUpdatedIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { updated_ = mutableCopy(updated_); bitField0_ |= 0x00000002; } } /** * repeated uint64 updated = 2; * @return A list containing the updated. */ public java.util.List getUpdatedList() { return ((bitField0_ & 0x00000002) != 0) ? java.util.Collections.unmodifiableList(updated_) : updated_; } /** * repeated uint64 updated = 2; * @return The count of updated. */ public int getUpdatedCount() { return updated_.size(); } /** * repeated uint64 updated = 2; * @param index The index of the element to return. * @return The updated at the given index. */ public long getUpdated(int index) { return updated_.getLong(index); } /** * repeated uint64 updated = 2; * @param index The index to set the value at. * @param value The updated to set. * @return This builder for chaining. */ public Builder setUpdated( int index, long value) { ensureUpdatedIsMutable(); updated_.setLong(index, value); onChanged(); return this; } /** * repeated uint64 updated = 2; * @param value The updated to add. * @return This builder for chaining. */ public Builder addUpdated(long value) { ensureUpdatedIsMutable(); updated_.addLong(value); onChanged(); return this; } /** * repeated uint64 updated = 2; * @param values The updated to add. * @return This builder for chaining. */ public Builder addAllUpdated( java.lang.Iterable values) { ensureUpdatedIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, updated_); onChanged(); return this; } /** * repeated uint64 updated = 2; * @return This builder for chaining. 
*/
public Builder clearUpdated() {
  updated_ = emptyLongList();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

private org.apache.hbase.thirdparty.com.google.protobuf.Internal.LongList deleted_ = emptyLongList();
private void ensureDeletedIsMutable() {
  if (!((bitField0_ & 0x00000004) != 0)) {
    deleted_ = mutableCopy(deleted_);
    bitField0_ |= 0x00000004;
  }
}
/**
 * repeated uint64 deleted = 3;
 * @return A list containing the deleted.
 */
public java.util.List<java.lang.Long> getDeletedList() {
  return ((bitField0_ & 0x00000004) != 0) ?
           java.util.Collections.unmodifiableList(deleted_) : deleted_;
}
/**
 * repeated uint64 deleted = 3;
 * @return The count of deleted.
 */
public int getDeletedCount() {
  return deleted_.size();
}
/**
 * repeated uint64 deleted = 3;
 * @param index The index of the element to return.
 * @return The deleted at the given index.
 */
public long getDeleted(int index) {
  return deleted_.getLong(index);
}
/**
 * repeated uint64 deleted = 3;
 * @param index The index to set the value at.
 * @param value The deleted to set.
 * @return This builder for chaining.
 */
public Builder setDeleted(
    int index, long value) {
  ensureDeletedIsMutable();
  deleted_.setLong(index, value);
  onChanged();
  return this;
}
/**
 * repeated uint64 deleted = 3;
 * @param value The deleted to add.
 * @return This builder for chaining.
 */
public Builder addDeleted(long value) {
  ensureDeletedIsMutable();
  deleted_.addLong(value);
  onChanged();
  return this;
}
/**
 * repeated uint64 deleted = 3;
 * @param values The deleted to add.
 * @return This builder for chaining.
 */
public Builder addAllDeleted(
    java.lang.Iterable<? extends java.lang.Long> values) {
  ensureDeletedIsMutable();
  org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
      values, deleted_);
  onChanged();
  return this;
}
/**
 * repeated uint64 deleted = 3;
 * @return This builder for chaining.
*/ public Builder clearDeleted() { deleted_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureStoreTracker.TrackerNode) } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureStoreTracker.TrackerNode) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public TrackerNode parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new TrackerNode(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public static final int NODE_FIELD_NUMBER = 1; private java.util.List node_; /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ @java.lang.Override public java.util.List getNodeList() { return node_; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ @java.lang.Override public java.util.List getNodeOrBuilderList() { return node_; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ @java.lang.Override public int getNodeCount() { return node_.size(); } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getNode(int index) { return node_.get(index); } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder getNodeOrBuilder( int index) { return node_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getNodeCount(); i++) { if (!getNode(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void 
writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < node_.size(); i++) { output.writeMessage(1, node_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < node_.size(); i++) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeMessageSize(1, node_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker) obj; if (!getNodeList() .equals(other.getNodeList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodeCount() > 0) { hash = (37 * hash) + NODE_FIELD_NUMBER; hash = (53 * hash) + getNodeList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ProcedureStoreTracker} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureStoreTracker) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTrackerOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (nodeBuilder_ == null) { node_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { nodeBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_descriptor; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker(this); int from_bitField0_ = bitField0_; if (nodeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { node_ = java.util.Collections.unmodifiableList(node_); bitField0_ = (bitField0_ & ~0x00000001); } result.node_ 
= node_; } else { result.node_ = nodeBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.getDefaultInstance()) return this; if (nodeBuilder_ == null) { if (!other.node_.isEmpty()) { if (node_.isEmpty()) { node_ = other.node_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeIsMutable(); node_.addAll(other.node_); } onChanged(); } } else { if (!other.node_.isEmpty()) { if (nodeBuilder_.isEmpty()) { nodeBuilder_.dispose(); nodeBuilder_ = null; node_ = other.node_; bitField0_ = (bitField0_ & ~0x00000001); nodeBuilder_ = org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNodeFieldBuilder() : null; } else { nodeBuilder_.addAllMessages(other.node_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getNodeCount(); i++) { if (!getNode(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List node_ = java.util.Collections.emptyList(); private void ensureNodeIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { node_ = new java.util.ArrayList(node_); bitField0_ |= 0x00000001; } } private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder> nodeBuilder_; /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public java.util.List getNodeList() { if (nodeBuilder_ == null) { return java.util.Collections.unmodifiableList(node_); } else { return nodeBuilder_.getMessageList(); } } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public int getNodeCount() { if (nodeBuilder_ == null) { return node_.size(); } else { return nodeBuilder_.getCount(); } } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getNode(int index) { if (nodeBuilder_ == null) { return node_.get(index); } else { return nodeBuilder_.getMessage(index); } } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder setNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode value) { if (nodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeIsMutable(); node_.set(index, value); onChanged(); } else { nodeBuilder_.setMessage(index, value); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder setNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder builderForValue) { if (nodeBuilder_ == null) { ensureNodeIsMutable(); node_.set(index, builderForValue.build()); onChanged(); } else { nodeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder addNode(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode value) { if 
(nodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeIsMutable(); node_.add(value); onChanged(); } else { nodeBuilder_.addMessage(value); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder addNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode value) { if (nodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeIsMutable(); node_.add(index, value); onChanged(); } else { nodeBuilder_.addMessage(index, value); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder addNode( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder builderForValue) { if (nodeBuilder_ == null) { ensureNodeIsMutable(); node_.add(builderForValue.build()); onChanged(); } else { nodeBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder addNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder builderForValue) { if (nodeBuilder_ == null) { ensureNodeIsMutable(); node_.add(index, builderForValue.build()); onChanged(); } else { nodeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder addAllNode( java.lang.Iterable values) { if (nodeBuilder_ == null) { ensureNodeIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, node_); onChanged(); } else { nodeBuilder_.addAllMessages(values); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder clearNode() { if (nodeBuilder_ == null) { node_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { nodeBuilder_.clear(); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public Builder removeNode(int index) { if (nodeBuilder_ == null) { ensureNodeIsMutable(); node_.remove(index); onChanged(); } else { nodeBuilder_.remove(index); } return this; } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder getNodeBuilder( int index) { return getNodeFieldBuilder().getBuilder(index); } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder getNodeOrBuilder( int index) { if (nodeBuilder_ == null) { return node_.get(index); } else { return nodeBuilder_.getMessageOrBuilder(index); } } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public java.util.List getNodeOrBuilderList() { if (nodeBuilder_ != null) { return nodeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(node_); } } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder addNodeBuilder() { return getNodeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.getDefaultInstance()); } 
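/*
 * Editor's note (not part of the generated source): addNodeBuilder() above returns a
 * nested TrackerNode.Builder that stays attached to this ProcedureStoreTracker.Builder,
 * so the repeated element can be populated in place instead of being built separately.
 * A short sketch, assuming only methods generated in this file:
 *
 *   ProcedureProtos.ProcedureStoreTracker.Builder trackerBuilder =
 *       ProcedureProtos.ProcedureStoreTracker.newBuilder();
 *   trackerBuilder.addNodeBuilder()   // new TrackerNode element, edited in place
 *       .setStartId(200L)
 *       .addUpdated(201L);
 *   ProcedureProtos.ProcedureStoreTracker tracker = trackerBuilder.build();
 */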
/** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder addNodeBuilder( int index) { return getNodeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.getDefaultInstance()); } /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ public java.util.List getNodeBuilderList() { return getNodeFieldBuilder().getBuilderList(); } private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder> getNodeFieldBuilder() { if (nodeBuilder_ == null) { nodeBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder>( node_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); node_ = null; } return nodeBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureStoreTracker) } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureStoreTracker) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public ProcedureStoreTracker parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new ProcedureStoreTracker(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ProcedureWALEntryOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureWALEntry) 
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder { /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return Whether the type field is set. */ boolean hasType(); /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return The type. */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type getType(); /** * repeated .hbase.pb.Procedure procedure = 2; */ java.util.List getProcedureList(); /** * repeated .hbase.pb.Procedure procedure = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index); /** * repeated .hbase.pb.Procedure procedure = 2; */ int getProcedureCount(); /** * repeated .hbase.pb.Procedure procedure = 2; */ java.util.List getProcedureOrBuilderList(); /** * repeated .hbase.pb.Procedure procedure = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder( int index); /** * optional uint64 proc_id = 3; * @return Whether the procId field is set. */ boolean hasProcId(); /** * optional uint64 proc_id = 3; * @return The procId. */ long getProcId(); /** * repeated uint64 child_id = 4; * @return A list containing the childId. */ java.util.List getChildIdList(); /** * repeated uint64 child_id = 4; * @return The count of childId. */ int getChildIdCount(); /** * repeated uint64 child_id = 4; * @param index The index of the element to return. * @return The childId at the given index. */ long getChildId(int index); } /** * Protobuf type {@code hbase.pb.ProcedureWALEntry} */ @javax.annotation.Generated("proto") public static final class ProcedureWALEntry extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureWALEntry) ProcedureWALEntryOrBuilder { private static final long serialVersionUID = 0L; // Use ProcedureWALEntry.newBuilder() to construct. 
private ProcedureWALEntry(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ProcedureWALEntry() { type_ = 1; procedure_ = java.util.Collections.emptyList(); childId_ = emptyLongList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ProcedureWALEntry(); } @java.lang.Override public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ProcedureWALEntry( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; type_ = rawValue; } break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { procedure_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } procedure_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); break; } case 24: { bitField0_ |= 0x00000002; procId_ = input.readUInt64(); break; } case 32: { if (!((mutable_bitField0_ & 0x00000008) != 0)) { childId_ = newLongList(); mutable_bitField0_ |= 0x00000008; } childId_.addLong(input.readUInt64()); break; } case 34: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000008) != 0) && input.getBytesUntilLimit() > 0) { childId_ = newLongList(); mutable_bitField0_ |= 0x00000008; } while (input.getBytesUntilLimit() > 0) { childId_.addLong(input.readUInt64()); } input.popLimit(limit); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { procedure_ = java.util.Collections.unmodifiableList(procedure_); } if (((mutable_bitField0_ & 0x00000008) != 0)) { childId_.makeImmutable(); // C } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Builder.class); } /** * Protobuf enum {@code hbase.pb.ProcedureWALEntry.Type} */ public enum Type implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum { /** * PROCEDURE_WAL_EOF = 1; */ PROCEDURE_WAL_EOF(1), /** * PROCEDURE_WAL_INIT = 2; */ PROCEDURE_WAL_INIT(2), /** * PROCEDURE_WAL_INSERT = 3; */ PROCEDURE_WAL_INSERT(3), /** * PROCEDURE_WAL_UPDATE = 4; */ PROCEDURE_WAL_UPDATE(4), /** * PROCEDURE_WAL_DELETE = 5; */ PROCEDURE_WAL_DELETE(5), /** * PROCEDURE_WAL_COMPACT = 6; */ PROCEDURE_WAL_COMPACT(6), ; /** * PROCEDURE_WAL_EOF = 1; */ public static final int PROCEDURE_WAL_EOF_VALUE = 1; /** * PROCEDURE_WAL_INIT = 2; */ public static final int PROCEDURE_WAL_INIT_VALUE = 2; /** * PROCEDURE_WAL_INSERT = 3; */ public static final int PROCEDURE_WAL_INSERT_VALUE = 3; /** * PROCEDURE_WAL_UPDATE = 4; */ public static final int PROCEDURE_WAL_UPDATE_VALUE = 4; /** * PROCEDURE_WAL_DELETE = 5; */ public static final int PROCEDURE_WAL_DELETE_VALUE = 5; /** * PROCEDURE_WAL_COMPACT = 6; */ public static final int PROCEDURE_WAL_COMPACT_VALUE = 6; public final int getNumber() { return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Type valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static Type forNumber(int value) { switch (value) { case 1: return PROCEDURE_WAL_EOF; case 2: return PROCEDURE_WAL_INIT; case 3: return PROCEDURE_WAL_INSERT; case 4: return PROCEDURE_WAL_UPDATE; case 5: return PROCEDURE_WAL_DELETE; case 6: return PROCEDURE_WAL_COMPACT; default: return null; } } public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap< Type> internalValueMap = new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap() { public Type findValueByNumber(int number) { return Type.forNumber(number); } }; public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.getDescriptor().getEnumTypes().get(0); } private static final Type[] VALUES = values(); public static Type valueOf( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private Type(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.ProcedureWALEntry.Type) } private int bitField0_; public static final int TYPE_FIELD_NUMBER = 1; private int type_; /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return The type. */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type getType() { @SuppressWarnings("deprecation") org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(type_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF : result; } public static final int PROCEDURE_FIELD_NUMBER = 2; private java.util.List procedure_; /** * repeated .hbase.pb.Procedure procedure = 2; */ @java.lang.Override public java.util.List getProcedureList() { return procedure_; } /** * repeated .hbase.pb.Procedure procedure = 2; */ @java.lang.Override public java.util.List getProcedureOrBuilderList() { return procedure_; } /** * repeated .hbase.pb.Procedure procedure = 2; */ @java.lang.Override public int getProcedureCount() { return procedure_.size(); } /** * repeated .hbase.pb.Procedure procedure = 2; */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) { return procedure_.get(index); } /** * repeated .hbase.pb.Procedure procedure = 2; */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder( int index) { return procedure_.get(index); } public static final int PROC_ID_FIELD_NUMBER = 3; private long procId_; /** * optional uint64 proc_id = 3; * @return Whether the procId field is set. */ @java.lang.Override public boolean hasProcId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional uint64 proc_id = 3; * @return The procId. */ @java.lang.Override public long getProcId() { return procId_; } public static final int CHILD_ID_FIELD_NUMBER = 4; private org.apache.hbase.thirdparty.com.google.protobuf.Internal.LongList childId_; /** * repeated uint64 child_id = 4; * @return A list containing the childId. */ @java.lang.Override public java.util.List getChildIdList() { return childId_; } /** * repeated uint64 child_id = 4; * @return The count of childId. */ public int getChildIdCount() { return childId_.size(); } /** * repeated uint64 child_id = 4; * @param index The index of the element to return. * @return The childId at the given index. 
*/ public long getChildId(int index) { return childId_.getLong(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasType()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getProcedureCount(); i++) { if (!getProcedure(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, type_); } for (int i = 0; i < procedure_.size(); i++) { output.writeMessage(2, procedure_.get(i)); } if (((bitField0_ & 0x00000002) != 0)) { output.writeUInt64(3, procId_); } for (int i = 0; i < childId_.size(); i++) { output.writeUInt64(4, childId_.getLong(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeEnumSize(1, type_); } for (int i = 0; i < procedure_.size(); i++) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeMessageSize(2, procedure_.get(i)); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, procId_); } { int dataSize = 0; for (int i = 0; i < childId_.size(); i++) { dataSize += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream .computeUInt64SizeNoTag(childId_.getLong(i)); } size += dataSize; size += 1 * getChildIdList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry) obj; if (hasType() != other.hasType()) return false; if (hasType()) { if (type_ != other.type_) return false; } if (!getProcedureList() .equals(other.getProcedureList())) return false; if (hasProcId() != other.hasProcId()) return false; if (hasProcId()) { if (getProcId() != other.getProcId()) return false; } if (!getChildIdList() .equals(other.getChildIdList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; } if (getProcedureCount() > 0) { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; hash = (53 * hash) + getProcedureList().hashCode(); } if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getProcId()); } if (getChildIdCount() > 0) { hash = (37 * hash) + CHILD_ID_FIELD_NUMBER; hash = (53 * hash) + getChildIdList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; 
return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( java.nio.ByteBuffer data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( java.nio.ByteBuffer data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.ByteString data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom(byte[] data) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( byte[] data, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseDelimitedFrom( java.io.InputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ProcedureWALEntry} */ @javax.annotation.Generated("proto") public static final class Builder extends org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureWALEntry) org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntryOrBuilder { public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_descriptor; } @java.lang.Override protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); type_ = 1; bitField0_ = (bitField0_ & ~0x00000001); if (procedureBuilder_ == null) { procedure_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { procedureBuilder_.clear(); } procId_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); childId_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000008); return this; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_descriptor; } 
@java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.type_ = type_; if (procedureBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { procedure_ = java.util.Collections.unmodifiableList(procedure_); bitField0_ = (bitField0_ & ~0x00000002); } result.procedure_ = procedure_; } else { result.procedure_ = procedureBuilder_.build(); } if (((from_bitField0_ & 0x00000004) != 0)) { result.procId_ = procId_; to_bitField0_ |= 0x00000002; } if (((bitField0_ & 0x00000008) != 0)) { childId_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000008); } result.childId_ = childId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.getDefaultInstance()) return this; if (other.hasType()) { setType(other.getType()); } if (procedureBuilder_ == null) { if (!other.procedure_.isEmpty()) { if (procedure_.isEmpty()) { procedure_ = other.procedure_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureProcedureIsMutable(); 
procedure_.addAll(other.procedure_); } onChanged(); } } else { if (!other.procedure_.isEmpty()) { if (procedureBuilder_.isEmpty()) { procedureBuilder_.dispose(); procedureBuilder_ = null; procedure_ = other.procedure_; bitField0_ = (bitField0_ & ~0x00000002); procedureBuilder_ = org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getProcedureFieldBuilder() : null; } else { procedureBuilder_.addAllMessages(other.procedure_); } } } if (other.hasProcId()) { setProcId(other.getProcId()); } if (!other.childId_.isEmpty()) { if (childId_.isEmpty()) { childId_ = other.childId_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureChildIdIsMutable(); childId_.addAll(other.childId_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasType()) { return false; } for (int i = 0; i < getProcedureCount(); i++) { if (!getProcedure(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int type_ = 1; /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return The type. */ @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type getType() { @SuppressWarnings("deprecation") org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(type_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF : result; } /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @param value The type to set. * @return This builder for chaining. */ public Builder setType(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value.getNumber(); onChanged(); return this; } /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; * @return This builder for chaining. 
*/ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = 1; onChanged(); return this; } private java.util.List procedure_ = java.util.Collections.emptyList(); private void ensureProcedureIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { procedure_ = new java.util.ArrayList(procedure_); bitField0_ |= 0x00000002; } } private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_; /** * repeated .hbase.pb.Procedure procedure = 2; */ public java.util.List getProcedureList() { if (procedureBuilder_ == null) { return java.util.Collections.unmodifiableList(procedure_); } else { return procedureBuilder_.getMessageList(); } } /** * repeated .hbase.pb.Procedure procedure = 2; */ public int getProcedureCount() { if (procedureBuilder_ == null) { return procedure_.size(); } else { return procedureBuilder_.getCount(); } } /** * repeated .hbase.pb.Procedure procedure = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) { if (procedureBuilder_ == null) { return procedure_.get(index); } else { return procedureBuilder_.getMessage(index); } } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder setProcedure( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { if (procedureBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProcedureIsMutable(); procedure_.set(index, value); onChanged(); } else { procedureBuilder_.setMessage(index, value); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder setProcedure( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); procedure_.set(index, builderForValue.build()); onChanged(); } else { procedureBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder addProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { if (procedureBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProcedureIsMutable(); procedure_.add(value); onChanged(); } else { procedureBuilder_.addMessage(value); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder addProcedure( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { if (procedureBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProcedureIsMutable(); procedure_.add(index, value); onChanged(); } else { procedureBuilder_.addMessage(index, value); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder addProcedure( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); procedure_.add(builderForValue.build()); onChanged(); } else { procedureBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder addProcedure( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); procedure_.add(index, builderForValue.build()); onChanged(); } else { procedureBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder addAllProcedure( java.lang.Iterable values) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, procedure_); onChanged(); } else { procedureBuilder_.addAllMessages(values); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder clearProcedure() { if (procedureBuilder_ == null) { procedure_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { procedureBuilder_.clear(); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public Builder removeProcedure(int index) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); procedure_.remove(index); onChanged(); } else { procedureBuilder_.remove(index); } return this; } /** * repeated .hbase.pb.Procedure procedure = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder( int index) { return getProcedureFieldBuilder().getBuilder(index); } /** * repeated .hbase.pb.Procedure procedure = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder( int index) { if (procedureBuilder_ == null) { return procedure_.get(index); } else { return procedureBuilder_.getMessageOrBuilder(index); } } /** * repeated .hbase.pb.Procedure procedure = 2; */ public java.util.List getProcedureOrBuilderList() { if (procedureBuilder_ != null) { return procedureBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(procedure_); } } /** * repeated .hbase.pb.Procedure procedure = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder() { return getProcedureFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()); } /** * repeated .hbase.pb.Procedure procedure = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder( int index) { return getProcedureFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()); } /** * repeated .hbase.pb.Procedure procedure = 2; */ public java.util.List getProcedureBuilderList() { return getProcedureFieldBuilder().getBuilderList(); } private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { procedureBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>( 
procedure_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); procedure_ = null; } return procedureBuilder_; } private long procId_ ; /** * optional uint64 proc_id = 3; * @return Whether the procId field is set. */ @java.lang.Override public boolean hasProcId() { return ((bitField0_ & 0x00000004) != 0); } /** * optional uint64 proc_id = 3; * @return The procId. */ @java.lang.Override public long getProcId() { return procId_; } /** * optional uint64 proc_id = 3; * @param value The procId to set. * @return This builder for chaining. */ public Builder setProcId(long value) { bitField0_ |= 0x00000004; procId_ = value; onChanged(); return this; } /** * optional uint64 proc_id = 3; * @return This builder for chaining. */ public Builder clearProcId() { bitField0_ = (bitField0_ & ~0x00000004); procId_ = 0L; onChanged(); return this; } private org.apache.hbase.thirdparty.com.google.protobuf.Internal.LongList childId_ = emptyLongList(); private void ensureChildIdIsMutable() { if (!((bitField0_ & 0x00000008) != 0)) { childId_ = mutableCopy(childId_); bitField0_ |= 0x00000008; } } /** * repeated uint64 child_id = 4; * @return A list containing the childId. */ public java.util.List getChildIdList() { return ((bitField0_ & 0x00000008) != 0) ? java.util.Collections.unmodifiableList(childId_) : childId_; } /** * repeated uint64 child_id = 4; * @return The count of childId. */ public int getChildIdCount() { return childId_.size(); } /** * repeated uint64 child_id = 4; * @param index The index of the element to return. * @return The childId at the given index. */ public long getChildId(int index) { return childId_.getLong(index); } /** * repeated uint64 child_id = 4; * @param index The index to set the value at. * @param value The childId to set. * @return This builder for chaining. */ public Builder setChildId( int index, long value) { ensureChildIdIsMutable(); childId_.setLong(index, value); onChanged(); return this; } /** * repeated uint64 child_id = 4; * @param value The childId to add. * @return This builder for chaining. */ public Builder addChildId(long value) { ensureChildIdIsMutable(); childId_.addLong(value); onChanged(); return this; } /** * repeated uint64 child_id = 4; * @param values The childId to add. * @return This builder for chaining. */ public Builder addAllChildId( java.lang.Iterable values) { ensureChildIdIsMutable(); org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, childId_); onChanged(); return this; } /** * repeated uint64 child_id = 4; * @return This builder for chaining. 
*/ public Builder clearChildId() { childId_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureWALEntry) } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALEntry) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser() { @java.lang.Override public ProcedureWALEntry parsePartialFrom( org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input, org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { return new ProcedureWALEntry(input, extensionRegistry); } }; public static org.apache.hbase.thirdparty.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hbase.thirdparty.com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Procedure_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Procedure_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SequentialProcedureData_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StateMachineProcedureData_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureWALHeader_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureWALTrailer_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureStoreTracker_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable; private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureWALEntry_descriptor; private static final org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable; public static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\017Procedure.proto\022\010hbase.pb\032\031google/prot" + "obuf/any.proto\032\023ErrorHandling.proto\"\252\003\n\t" + "Procedure\022\022\n\nclass_name\030\001 \002(\t\022\021\n\tparent_" + "id\030\002 \001(\004\022\017\n\007proc_id\030\003 \002(\004\022\026\n\016submitted_t" + "ime\030\004 \002(\004\022\r\n\005owner\030\005 \001(\t\022\'\n\005state\030\006 \002(\0162" + "\030.hbase.pb.ProcedureState\022\020\n\010stack_id\030\007 " + "\003(\r\022\023\n\013last_update\030\010 \002(\004\022\017\n\007timeout\030\t \001(" + "\r\0224\n\texception\030\n \001(\0132!.hbase.pb.ForeignE" + "xceptionMessage\022\016\n\006result\030\013 \001(\014\022\022\n\nstate" + "_data\030\014 \001(\014\022+\n\rstate_message\030\017 \003(\0132\024.goo" + "gle.protobuf.Any\022\026\n\013nonce_group\030\r \001(\004:\0010" + "\022\020\n\005nonce\030\016 \001(\004:\0010\022\025\n\006locked\030\020 \001(\010:\005fals" + "e\022\025\n\006bypass\030\021 \001(\010:\005false\"+\n\027SequentialPr" + "ocedureData\022\020\n\010executed\030\001 \002(\010\"*\n\031StateMa" + "chineProcedureData\022\r\n\005state\030\001 \003(\r\"X\n\022Pro" + "cedureWALHeader\022\017\n\007version\030\001 \002(\r\022\014\n\004type" + "\030\002 \002(\r\022\016\n\006log_id\030\003 \002(\004\022\023\n\013min_proc_id\030\004 " + "\002(\004\";\n\023ProcedureWALTrailer\022\017\n\007version\030\001 " + "\002(\r\022\023\n\013tracker_pos\030\002 \002(\004\"\225\001\n\025ProcedureSt" + "oreTracker\0229\n\004node\030\001 \003(\0132+.hbase.pb.Proc" + "edureStoreTracker.TrackerNode\032A\n\013Tracker" + "Node\022\020\n\010start_id\030\001 \002(\004\022\017\n\007updated\030\002 \003(\004\022" + "\017\n\007deleted\030\003 \003(\004\"\257\002\n\021ProcedureWALEntry\022." 
+ "\n\004type\030\001 \002(\0162 .hbase.pb.ProcedureWALEntr" + "y.Type\022&\n\tprocedure\030\002 \003(\0132\023.hbase.pb.Pro" + "cedure\022\017\n\007proc_id\030\003 \001(\004\022\020\n\010child_id\030\004 \003(" + "\004\"\236\001\n\004Type\022\025\n\021PROCEDURE_WAL_EOF\020\001\022\026\n\022PRO" + "CEDURE_WAL_INIT\020\002\022\030\n\024PROCEDURE_WAL_INSER" + "T\020\003\022\030\n\024PROCEDURE_WAL_UPDATE\020\004\022\030\n\024PROCEDU" + "RE_WAL_DELETE\020\005\022\031\n\025PROCEDURE_WAL_COMPACT" + "\020\006*{\n\016ProcedureState\022\020\n\014INITIALIZING\020\001\022\014" + "\n\010RUNNABLE\020\002\022\013\n\007WAITING\020\003\022\023\n\017WAITING_TIM" + "EOUT\020\004\022\016\n\nROLLEDBACK\020\005\022\013\n\007SUCCESS\020\006\022\n\n\006F" + "AILED\020\007BL\n1org.apache.hadoop.hbase.shade" + "d.protobuf.generatedB\017ProcedureProtosH\001\210" + "\001\001\240\001\001" }; descriptor = org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hbase.thirdparty.com.google.protobuf.AnyProto.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor(), }); internal_static_hbase_pb_Procedure_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_Procedure_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Procedure_descriptor, new java.lang.String[] { "ClassName", "ParentId", "ProcId", "SubmittedTime", "Owner", "State", "StackId", "LastUpdate", "Timeout", "Exception", "Result", "StateData", "StateMessage", "NonceGroup", "Nonce", "Locked", "Bypass", }); internal_static_hbase_pb_SequentialProcedureData_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_SequentialProcedureData_descriptor, new java.lang.String[] { "Executed", }); internal_static_hbase_pb_StateMachineProcedureData_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_StateMachineProcedureData_descriptor, new java.lang.String[] { "State", }); internal_static_hbase_pb_ProcedureWALHeader_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ProcedureWALHeader_descriptor, new java.lang.String[] { "Version", "Type", "LogId", "MinProcId", }); internal_static_hbase_pb_ProcedureWALTrailer_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ProcedureWALTrailer_descriptor, new java.lang.String[] { "Version", "TrackerPos", }); internal_static_hbase_pb_ProcedureStoreTracker_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_hbase_pb_ProcedureStoreTracker_descriptor, new java.lang.String[] { "Node", }); internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor = internal_static_hbase_pb_ProcedureStoreTracker_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor, new java.lang.String[] { "StartId", "Updated", "Deleted", }); internal_static_hbase_pb_ProcedureWALEntry_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable = new org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ProcedureWALEntry_descriptor, new java.lang.String[] { "Type", "Procedure", "ProcId", "ChildId", }); org.apache.hbase.thirdparty.com.google.protobuf.AnyProto.getDescriptor(); org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
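The generated ProcedureWALEntry API above follows the usual protobuf-java builder pattern: required fields must be set before build(), optional and repeated fields expose has*/get*/set*/add* accessors, and the static parseFrom overloads reconstruct a message from bytes or streams. The following is a minimal, hypothetical round-trip sketch, assuming the jar containing this shaded package is on the classpath; setType, setProcId, addChildId, build() and parseFrom(byte[]) are taken from the generated source shown here, while toByteArray() is assumed from the standard protobuf Message API that GeneratedMessageV3 extends.

// Hypothetical usage sketch; all names except toByteArray() appear in the
// generated source above. Not an official example from the HBase project.
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry;

public class ProcedureWALEntryRoundTrip {
  public static void main(String[] args) throws Exception {
    // 'type' is the only required field of ProcedureWALEntry, so a DELETE
    // entry keeps the example self-contained.
    ProcedureWALEntry entry = ProcedureWALEntry.newBuilder()
        .setType(ProcedureWALEntry.Type.PROCEDURE_WAL_DELETE)
        .setProcId(42L)           // optional uint64 proc_id = 3
        .addChildId(43L)          // repeated uint64 child_id = 4
        .addChildId(44L)
        .build();                 // build() fails if a required field is unset

    // Serialize, then parse back through the generated parser.
    byte[] bytes = entry.toByteArray();
    ProcedureWALEntry parsed = ProcedureWALEntry.parseFrom(bytes);

    System.out.println(parsed.getType());        // PROCEDURE_WAL_DELETE
    System.out.println(parsed.getProcId());      // 42
    System.out.println(parsed.getChildIdList()); // [43, 44]
  }
}

An INSERT entry would normally carry Procedure messages via addProcedure(); isInitialized() and build() would then also require each nested Procedure's own required fields (class_name, proc_id, submitted_time, state, last_update, per the descriptor data above) to be set.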