// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: HAServiceProtocol.proto

package org.apache.hadoop.ha.proto;

public final class HAServiceProtocolProtos {
  private HAServiceProtocolProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public enum HAServiceStateProto
      implements com.google.protobuf.ProtocolMessageEnum {
    INITIALIZING(0, 0),
    ACTIVE(1, 1),
    STANDBY(2, 2),
    ;
    
    public static final int INITIALIZING_VALUE = 0;
    public static final int ACTIVE_VALUE = 1;
    public static final int STANDBY_VALUE = 2;
    
    
    public final int getNumber() { return value; }
    
    public static HAServiceStateProto valueOf(int value) {
      switch (value) {
        case 0: return INITIALIZING;
        case 1: return ACTIVE;
        case 2: return STANDBY;
        default: return null;
      }
    }
    
    public static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>() {
            public HAServiceStateProto findValueByNumber(int number) {
              return HAServiceStateProto.valueOf(number);
            }
          };
    
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(0);
    }
    
    private static final HAServiceStateProto[] VALUES = {
      INITIALIZING, ACTIVE, STANDBY, 
    };
    
    public static HAServiceStateProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }
    
    private final int index;
    private final int value;
    
    private HAServiceStateProto(int index, int value) {
      this.index = index;
      this.value = value;
    }
    
    // @@protoc_insertion_point(enum_scope:HAServiceStateProto)
  }
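  
  // Illustrative sketch, not part of the generated file: decoding a raw wire
  // number into an HAServiceStateProto. valueOf(int) returns null for numbers
  // this enum does not define, so callers should handle that case explicitly.
  private static HAServiceStateProto decodeStateOrDefault(int wireNumber) {
    HAServiceStateProto state = HAServiceStateProto.valueOf(wireNumber);
    // Fall back to INITIALIZING (an assumed default) for unrecognized numbers,
    // e.g. a value sent by a peer built from a newer .proto revision.
    return (state == null) ? HAServiceStateProto.INITIALIZING : state;
  }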
  
  public enum HARequestSource
      implements com.google.protobuf.ProtocolMessageEnum {
    REQUEST_BY_USER(0, 0),
    REQUEST_BY_USER_FORCED(1, 1),
    REQUEST_BY_ZKFC(2, 2),
    ;
    
    public static final int REQUEST_BY_USER_VALUE = 0;
    public static final int REQUEST_BY_USER_FORCED_VALUE = 1;
    public static final int REQUEST_BY_ZKFC_VALUE = 2;
    
    
    public final int getNumber() { return value; }
    
    public static HARequestSource valueOf(int value) {
      switch (value) {
        case 0: return REQUEST_BY_USER;
        case 1: return REQUEST_BY_USER_FORCED;
        case 2: return REQUEST_BY_ZKFC;
        default: return null;
      }
    }
    
    public static com.google.protobuf.Internal.EnumLiteMap<HARequestSource>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<HARequestSource>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<HARequestSource>() {
            public HARequestSource findValueByNumber(int number) {
              return HARequestSource.valueOf(number);
            }
          };
    
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(1);
    }
    
    private static final HARequestSource[] VALUES = {
      REQUEST_BY_USER, REQUEST_BY_USER_FORCED, REQUEST_BY_ZKFC, 
    };
    
    public static HARequestSource valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }
    
    private final int index;
    private final int value;
    
    private HARequestSource(int index, int value) {
      this.index = index;
      this.value = value;
    }
    
    // @@protoc_insertion_point(enum_scope:HARequestSource)
  }
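  
  // Illustrative sketch, not part of the generated file: the reflective
  // counterpart of valueOf(int), looking a constant up through its
  // EnumValueDescriptor by name. findValueByName returns null for an unknown
  // name, which this sketch simply propagates to the caller.
  private static HARequestSource requestSourceByName(String name) {
    com.google.protobuf.Descriptors.EnumValueDescriptor desc =
        HARequestSource.getDescriptor().findValueByName(name);
    return desc == null ? null : HARequestSource.valueOf(desc);
  }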
  
  public interface HAStateChangeRequestInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .HARequestSource reqSource = 1;
    boolean hasReqSource();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource();
  }
  public static final class HAStateChangeRequestInfoProto extends
      com.google.protobuf.GeneratedMessage
      implements HAStateChangeRequestInfoProtoOrBuilder {
    // Use HAStateChangeRequestInfoProto.newBuilder() to construct.
    private HAStateChangeRequestInfoProto(Builder builder) {
      super(builder);
    }
    private HAStateChangeRequestInfoProto(boolean noInit) {}
    
    private static final HAStateChangeRequestInfoProto defaultInstance;
    public static HAStateChangeRequestInfoProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public HAStateChangeRequestInfoProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_HAStateChangeRequestInfoProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_HAStateChangeRequestInfoProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .HARequestSource reqSource = 1;
    public static final int REQSOURCE_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource reqSource_;
    public boolean hasReqSource() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource() {
      return reqSource_;
    }
    
    private void initFields() {
      reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasReqSource()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, reqSource_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, reqSource_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) obj;
      
      boolean result = true;
      result = result && (hasReqSource() == other.hasReqSource());
      if (hasReqSource()) {
        result = result &&
            (getReqSource() == other.getReqSource());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReqSource()) {
        hash = (37 * hash) + REQSOURCE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getReqSource());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_HAStateChangeRequestInfoProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_HAStateChangeRequestInfoProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.reqSource_ = reqSource_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) return this;
        if (other.hasReqSource()) {
          setReqSource(other.getReqSource());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasReqSource()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              int rawValue = input.readEnum();
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                reqSource_ = value;
              }
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required .HARequestSource reqSource = 1;
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
      public boolean hasReqSource() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource() {
        return reqSource_;
      }
      public Builder setReqSource(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        reqSource_ = value;
        onChanged();
        return this;
      }
      public Builder clearReqSource() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:HAStateChangeRequestInfoProto)
    }
    
    static {
      defaultInstance = new HAStateChangeRequestInfoProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:HAStateChangeRequestInfoProto)
  }
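  
  // Illustrative sketch, not part of the generated file: constructing a
  // request-info message with the generated Builder and round-tripping it
  // through its serialized bytes. build() throws an (unchecked) exception if
  // the required reqSource field was never set.
  private static HAStateChangeRequestInfoProto exampleRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    HAStateChangeRequestInfoProto msg = HAStateChangeRequestInfoProto.newBuilder()
        .setReqSource(HARequestSource.REQUEST_BY_ZKFC)
        .build();
    // parseFrom(ByteString) rebuilds an equal message from the wire form.
    return HAStateChangeRequestInfoProto.parseFrom(msg.toByteString());
  }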
  
  public interface MonitorHealthRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class MonitorHealthRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements MonitorHealthRequestProtoOrBuilder {
    // Use MonitorHealthRequestProto.newBuilder() to construct.
    private MonitorHealthRequestProto(Builder builder) {
      super(builder);
    }
    private MonitorHealthRequestProto(boolean noInit) {}
    
    private static final MonitorHealthRequestProto defaultInstance;
    public static MonitorHealthRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public MonitorHealthRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_fieldAccessorTable;
    }
    
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:MonitorHealthRequestProto)
    }
    
    static {
      defaultInstance = new MonitorHealthRequestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:MonitorHealthRequestProto)
  }
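  
  // Illustrative sketch, not part of the generated file: messages with no
  // fields, like MonitorHealthRequestProto, are typically sent as the shared
  // default instance rather than being rebuilt for every call.
  private static MonitorHealthRequestProto emptyHealthCheckRequest() {
    return MonitorHealthRequestProto.getDefaultInstance();
  }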
  
  public interface MonitorHealthResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class MonitorHealthResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements MonitorHealthResponseProtoOrBuilder {
    // Use MonitorHealthResponseProto.newBuilder() to construct.
    private MonitorHealthResponseProto(Builder builder) {
      super(builder);
    }
    private MonitorHealthResponseProto(boolean noInit) {}
    
    private static final MonitorHealthResponseProto defaultInstance;
    public static MonitorHealthResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public MonitorHealthResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_fieldAccessorTable;
    }
    
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:MonitorHealthResponseProto)
    }
    
    static {
      defaultInstance = new MonitorHealthResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:MonitorHealthResponseProto)
  }
  
  public interface TransitionToActiveRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .HAStateChangeRequestInfoProto reqInfo = 1;
    boolean hasReqInfo();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder();
  }
  public static final class TransitionToActiveRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToActiveRequestProtoOrBuilder {
    // Use TransitionToActiveRequestProto.newBuilder() to construct.
    private TransitionToActiveRequestProto(Builder builder) {
      super(builder);
    }
    private TransitionToActiveRequestProto(boolean noInit) {}
    
    private static final TransitionToActiveRequestProto defaultInstance;
    public static TransitionToActiveRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToActiveRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .HAStateChangeRequestInfoProto reqInfo = 1;
    public static final int REQINFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_;
    public boolean hasReqInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
      return reqInfo_;
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
      return reqInfo_;
    }
    
    private void initFields() {
      reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasReqInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getReqInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, reqInfo_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, reqInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) obj;
      
      boolean result = true;
      result = result && (hasReqInfo() == other.hasReqInfo());
      if (hasReqInfo()) {
        result = result && getReqInfo()
            .equals(other.getReqInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReqInfo()) {
        hash = (37 * hash) + REQINFO_FIELD_NUMBER;
        hash = (53 * hash) + getReqInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getReqInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (reqInfoBuilder_ == null) {
          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
        } else {
          reqInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
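      // buildParsed() backs the static parseFrom() helpers: unlike build(), it
      // reports a missing required field as a checked
      // InvalidProtocolBufferException instead of an unchecked
      // UninitializedMessageException.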
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (reqInfoBuilder_ == null) {
          result.reqInfo_ = reqInfo_;
        } else {
          result.reqInfo_ = reqInfoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance()) return this;
        if (other.hasReqInfo()) {
          mergeReqInfo(other.getReqInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasReqInfo()) {
          return false;
        }
        if (!getReqInfo().isInitialized()) {
          return false;
        }
        return true;
      }
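      // A required submessage must be both present and itself fully initialized
      // before build() can succeed.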
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder subBuilder = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder();
              if (hasReqInfo()) {
                subBuilder.mergeFrom(getReqInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setReqInfo(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
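      // Note on the parse loop above: a tag of 0 marks end of input, and the tag
      // for field 1 is (1 << 3) | 2 = 10 (field number 1, wire type 2 =
      // length-delimited), which is why the reqInfo submessage is read under
      // "case 10".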
      
      private int bitField0_;
      
      // required .HAStateChangeRequestInfoProto reqInfo = 1;
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> reqInfoBuilder_;
      public boolean hasReqInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
        if (reqInfoBuilder_ == null) {
          return reqInfo_;
        } else {
          return reqInfoBuilder_.getMessage();
        }
      }
      public Builder setReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
        if (reqInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqInfo_ = value;
          onChanged();
        } else {
          reqInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setReqInfo(
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder builderForValue) {
        if (reqInfoBuilder_ == null) {
          reqInfo_ = builderForValue.build();
          onChanged();
        } else {
          reqInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
        if (reqInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              reqInfo_ != org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) {
            reqInfo_ =
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder(reqInfo_).mergeFrom(value).buildPartial();
          } else {
            reqInfo_ = value;
          }
          onChanged();
        } else {
          reqInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearReqInfo() {
        if (reqInfoBuilder_ == null) {
          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
          onChanged();
        } else {
          reqInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder getReqInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReqInfoFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
        if (reqInfoBuilder_ != null) {
          return reqInfoBuilder_.getMessageOrBuilder();
        } else {
          return reqInfo_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> 
          getReqInfoFieldBuilder() {
        if (reqInfoBuilder_ == null) {
          reqInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder>(
                  reqInfo_,
                  getParentForChildren(),
                  isClean());
          reqInfo_ = null;
        }
        return reqInfoBuilder_;
      }
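      // The SingleFieldBuilder above is created lazily: on first use it takes
      // ownership of the current reqInfo_ value (which is then nulled out), and
      // all later reads and writes of the field go through the builder.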
      
      // @@protoc_insertion_point(builder_scope:TransitionToActiveRequestProto)
    }
    
    static {
      defaultInstance = new TransitionToActiveRequestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToActiveRequestProto)
  }
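  // Usage sketch (illustrative, not part of the generated file): assuming a
  // populated HAStateChangeRequestInfoProto named "info", a caller would
  // typically build, serialize, and re-parse the request like this:
  //
  //   TransitionToActiveRequestProto req = TransitionToActiveRequestProto.newBuilder()
  //       .setReqInfo(info)   // required field; build() throws if it is unset
  //       .build();
  //   byte[] wire = req.toByteArray();
  //   TransitionToActiveRequestProto parsed =
  //       TransitionToActiveRequestProto.parseFrom(wire);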
  
  public interface TransitionToActiveResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class TransitionToActiveResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToActiveResponseProtoOrBuilder {
    // Use TransitionToActiveResponseProto.newBuilder() to construct.
    private TransitionToActiveResponseProto(Builder builder) {
      super(builder);
    }
    private TransitionToActiveResponseProto(boolean noInit) {}
    
    private static final TransitionToActiveResponseProto defaultInstance;
    public static TransitionToActiveResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToActiveResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_fieldAccessorTable;
    }
    
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
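    // getSerializedSize() memoizes its result, and writeTo() calls it first, so
    // the cached size always matches the bytes actually written for this
    // immutable message.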
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:TransitionToActiveResponseProto)
    }
    
    static {
      defaultInstance = new TransitionToActiveResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToActiveResponseProto)
  }
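  // Usage sketch (illustrative): the response declares no fields, so the
  // delimited helpers mainly provide framing on a stream. Assuming an
  // InputStream "in" positioned at a length-delimited message:
  //
  //   TransitionToActiveResponseProto resp =
  //       TransitionToActiveResponseProto.parseDelimitedFrom(in);
  //   if (resp == null) {
  //     // end of stream: mergeDelimitedFrom() returned false
  //   }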
  
  public interface TransitionToStandbyRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .HAStateChangeRequestInfoProto reqInfo = 1;
    boolean hasReqInfo();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder();
  }
  public static final class TransitionToStandbyRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToStandbyRequestProtoOrBuilder {
    // Use TransitionToStandbyRequestProto.newBuilder() to construct.
    private TransitionToStandbyRequestProto(Builder builder) {
      super(builder);
    }
    private TransitionToStandbyRequestProto(boolean noInit) {}
    
    private static final TransitionToStandbyRequestProto defaultInstance;
    public static TransitionToStandbyRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToStandbyRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .HAStateChangeRequestInfoProto reqInfo = 1;
    public static final int REQINFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_;
    public boolean hasReqInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
      return reqInfo_;
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
      return reqInfo_;
    }
    
    private void initFields() {
      reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasReqInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getReqInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, reqInfo_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, reqInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) obj;
      
      boolean result = true;
      result = result && (hasReqInfo() == other.hasReqInfo());
      if (hasReqInfo()) {
        result = result && getReqInfo()
            .equals(other.getReqInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReqInfo()) {
        hash = (37 * hash) + REQINFO_FIELD_NUMBER;
        hash = (53 * hash) + getReqInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
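    // The hash mixes the descriptor, then the reqInfo field (its field number
    // times 37 and its value hash times 53) when present, and finally the
    // unknown fields, mirroring the equals() definition above.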
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getReqInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (reqInfoBuilder_ == null) {
          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
        } else {
          reqInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (reqInfoBuilder_ == null) {
          result.reqInfo_ = reqInfo_;
        } else {
          result.reqInfo_ = reqInfoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance()) return this;
        if (other.hasReqInfo()) {
          mergeReqInfo(other.getReqInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasReqInfo()) {
          return false;
        }
        if (!getReqInfo().isInitialized()) {
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder subBuilder = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder();
              if (hasReqInfo()) {
                subBuilder.mergeFrom(getReqInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setReqInfo(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required .HAStateChangeRequestInfoProto reqInfo = 1;
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> reqInfoBuilder_;
      public boolean hasReqInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
        if (reqInfoBuilder_ == null) {
          return reqInfo_;
        } else {
          return reqInfoBuilder_.getMessage();
        }
      }
      public Builder setReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
        if (reqInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqInfo_ = value;
          onChanged();
        } else {
          reqInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setReqInfo(
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder builderForValue) {
        if (reqInfoBuilder_ == null) {
          reqInfo_ = builderForValue.build();
          onChanged();
        } else {
          reqInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
        if (reqInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              reqInfo_ != org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) {
            reqInfo_ =
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder(reqInfo_).mergeFrom(value).buildPartial();
          } else {
            reqInfo_ = value;
          }
          onChanged();
        } else {
          reqInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearReqInfo() {
        if (reqInfoBuilder_ == null) {
          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
          onChanged();
        } else {
          reqInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder getReqInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReqInfoFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
        if (reqInfoBuilder_ != null) {
          return reqInfoBuilder_.getMessageOrBuilder();
        } else {
          return reqInfo_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> 
          getReqInfoFieldBuilder() {
        if (reqInfoBuilder_ == null) {
          reqInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder>(
                  reqInfo_,
                  getParentForChildren(),
                  isClean());
          reqInfo_ = null;
        }
        return reqInfoBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:TransitionToStandbyRequestProto)
    }
    
    static {
      defaultInstance = new TransitionToStandbyRequestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToStandbyRequestProto)
  }
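  // Usage sketch (illustrative): toBuilder() copies an existing message, and
  // mergeReqInfo() merges field-by-field into any reqInfo already present
  // instead of replacing it outright. Assuming messages "original" and
  // "extraInfo" exist:
  //
  //   TransitionToStandbyRequestProto updated = original.toBuilder()
  //       .mergeReqInfo(extraInfo)
  //       .build();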
  
  public interface TransitionToStandbyResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class TransitionToStandbyResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToStandbyResponseProtoOrBuilder {
    // Use TransitionToStandbyResponseProto.newBuilder() to construct.
    private TransitionToStandbyResponseProto(Builder builder) {
      super(builder);
    }
    private TransitionToStandbyResponseProto(boolean noInit) {}
    
    private static final TransitionToStandbyResponseProto defaultInstance;
    public static TransitionToStandbyResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToStandbyResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_fieldAccessorTable;
    }
    
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:TransitionToStandbyResponseProto)
    }
    
    static {
      defaultInstance = new TransitionToStandbyResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToStandbyResponseProto)
  }
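  // Usage sketch (illustrative): an empty acknowledgement like this is cheapest
  // to obtain from its singleton default instance. Assuming an OutputStream
  // "out" for a framed reply:
  //
  //   TransitionToStandbyResponseProto.getDefaultInstance().writeDelimitedTo(out);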
  
  public interface GetServiceStatusRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class GetServiceStatusRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetServiceStatusRequestProtoOrBuilder {
    // Use GetServiceStatusRequestProto.newBuilder() to construct.
    private GetServiceStatusRequestProto(Builder builder) {
      super(builder);
    }
    private GetServiceStatusRequestProto(boolean noInit) {}
    
    private static final GetServiceStatusRequestProto defaultInstance;
    public static GetServiceStatusRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public GetServiceStatusRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_fieldAccessorTable;
    }
    
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:GetServiceStatusRequestProto)
    }
    
    static {
      defaultInstance = new GetServiceStatusRequestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:GetServiceStatusRequestProto)
  }
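
  // Usage sketch (illustrative comment, not generated output): the request
  // message above declares no fields, so the shared default instance is all
  // a caller normally needs, and a wire round-trip is trivially small:
  //
  //   GetServiceStatusRequestProto req =
  //       GetServiceStatusRequestProto.getDefaultInstance();
  //   byte[] wire = req.toByteArray();  // empty payload for a fieldless message
  //   GetServiceStatusRequestProto parsed =
  //       GetServiceStatusRequestProto.parseFrom(wire);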
  
  public interface GetServiceStatusResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .HAServiceStateProto state = 1;
    boolean hasState();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState();
    
    // optional bool readyToBecomeActive = 2;
    boolean hasReadyToBecomeActive();
    boolean getReadyToBecomeActive();
    
    // optional string notReadyReason = 3;
    boolean hasNotReadyReason();
    String getNotReadyReason();
  }
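
  // For reference, the GetServiceStatusResponseProtoOrBuilder accessors above
  // mirror this message from HAServiceProtocol.proto (reconstructed from the
  // field comments):
  //
  //   message GetServiceStatusResponseProto {
  //     required HAServiceStateProto state = 1;
  //     optional bool readyToBecomeActive = 2;
  //     optional string notReadyReason = 3;
  //   }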
  public static final class GetServiceStatusResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetServiceStatusResponseProtoOrBuilder {
    // Use GetServiceStatusResponseProto.newBuilder() to construct.
    private GetServiceStatusResponseProto(Builder builder) {
      super(builder);
    }
    private GetServiceStatusResponseProto(boolean noInit) {}
    
    private static final GetServiceStatusResponseProto defaultInstance;
    public static GetServiceStatusResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public GetServiceStatusResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .HAServiceStateProto state = 1;
    public static final int STATE_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_;
    public boolean hasState() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
      return state_;
    }
    
    // optional bool readyToBecomeActive = 2;
    public static final int READYTOBECOMEACTIVE_FIELD_NUMBER = 2;
    private boolean readyToBecomeActive_;
    public boolean hasReadyToBecomeActive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public boolean getReadyToBecomeActive() {
      return readyToBecomeActive_;
    }
    
    // optional string notReadyReason = 3;
    public static final int NOTREADYREASON_FIELD_NUMBER = 3;
    private java.lang.Object notReadyReason_;
    public boolean hasNotReadyReason() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public String getNotReadyReason() {
      java.lang.Object ref = notReadyReason_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          notReadyReason_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getNotReadyReasonBytes() {
      java.lang.Object ref = notReadyReason_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        notReadyReason_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    private void initFields() {
      state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
      readyToBecomeActive_ = false;
      notReadyReason_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasState()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
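
    // Illustrative note: because state is required, a Builder that never sets
    // it fails this validation; buildPartial() still returns the incomplete
    // message, while build() throws (sketch):
    //
    //   GetServiceStatusResponseProto.Builder b =
    //       GetServiceStatusResponseProto.newBuilder();
    //   b.isInitialized();  // false: state unset
    //   b.buildPartial();   // returns a message whose isInitialized() is false
    //   b.build();          // throws UninitializedMessageException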
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, state_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, readyToBecomeActive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getNotReadyReasonBytes());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, state_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, readyToBecomeActive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getNotReadyReasonBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) obj;
      
      boolean result = true;
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result && (hasReadyToBecomeActive() == other.hasReadyToBecomeActive());
      if (hasReadyToBecomeActive()) {
        result = result && (getReadyToBecomeActive()
            == other.getReadyToBecomeActive());
      }
      result = result && (hasNotReadyReason() == other.hasNotReadyReason());
      if (hasNotReadyReason()) {
        result = result && getNotReadyReason()
            .equals(other.getNotReadyReason());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      if (hasReadyToBecomeActive()) {
        hash = (37 * hash) + READYTOBECOMEACTIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getReadyToBecomeActive());
      }
      if (hasNotReadyReason()) {
        hash = (37 * hash) + NOTREADYREASON_FIELD_NUMBER;
        hash = (53 * hash) + getNotReadyReason().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
        bitField0_ = (bitField0_ & ~0x00000001);
        readyToBecomeActive_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        notReadyReason_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.state_ = state_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.readyToBecomeActive_ = readyToBecomeActive_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.notReadyReason_ = notReadyReason_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()) return this;
        if (other.hasState()) {
          setState(other.getState());
        }
        if (other.hasReadyToBecomeActive()) {
          setReadyToBecomeActive(other.getReadyToBecomeActive());
        }
        if (other.hasNotReadyReason()) {
          setNotReadyReason(other.getNotReadyReason());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasState()) {
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              int rawValue = input.readEnum();
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                state_ = value;
              }
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              readyToBecomeActive_ = input.readBool();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              notReadyReason_ = input.readBytes();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required .HAServiceStateProto state = 1;
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
      public boolean hasState() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
        return state_;
      }
      public Builder setState(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        state_ = value;
        onChanged();
        return this;
      }
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000001);
        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
        onChanged();
        return this;
      }
      
      // optional bool readyToBecomeActive = 2;
      private boolean readyToBecomeActive_ ;
      public boolean hasReadyToBecomeActive() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public boolean getReadyToBecomeActive() {
        return readyToBecomeActive_;
      }
      public Builder setReadyToBecomeActive(boolean value) {
        bitField0_ |= 0x00000002;
        readyToBecomeActive_ = value;
        onChanged();
        return this;
      }
      public Builder clearReadyToBecomeActive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        readyToBecomeActive_ = false;
        onChanged();
        return this;
      }
      
      // optional string notReadyReason = 3;
      private java.lang.Object notReadyReason_ = "";
      public boolean hasNotReadyReason() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public String getNotReadyReason() {
        java.lang.Object ref = notReadyReason_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          notReadyReason_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setNotReadyReason(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        notReadyReason_ = value;
        onChanged();
        return this;
      }
      public Builder clearNotReadyReason() {
        bitField0_ = (bitField0_ & ~0x00000004);
        notReadyReason_ = getDefaultInstance().getNotReadyReason();
        onChanged();
        return this;
      }
      void setNotReadyReason(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000004;
        notReadyReason_ = value;
        onChanged();
      }
      
      // @@protoc_insertion_point(builder_scope:GetServiceStatusResponseProto)
    }
    
    static {
      defaultInstance = new GetServiceStatusResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:GetServiceStatusResponseProto)
  }
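
  // Builder round-trip sketch (illustrative; every name below is defined in
  // this file):
  //
  //   GetServiceStatusResponseProto resp =
  //       GetServiceStatusResponseProto.newBuilder()
  //           .setState(HAServiceStateProto.STANDBY)
  //           .setReadyToBecomeActive(true)
  //           .build();
  //   GetServiceStatusResponseProto parsed =
  //       GetServiceStatusResponseProto.parseFrom(resp.toByteArray());
  //   // parsed.getState() == HAServiceStateProto.STANDBY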
  
  public static abstract class HAServiceProtocolService
      implements com.google.protobuf.Service {
    protected HAServiceProtocolService() {}
    
    public interface Interface {
      public abstract void monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
      
      public abstract void transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
      
      public abstract void transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
      
      public abstract void getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
      
    }
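
    // Implementation sketch (illustrative): an Interface implementation
    // answers each call by completing its callback; newReflectiveService
    // below then adapts it to a generic com.google.protobuf.Service. Only
    // getServiceStatus is shown; the other three methods follow the same
    // pattern.
    //
    //   Interface impl = new Interface() {
    //     public void getServiceStatus(
    //         com.google.protobuf.RpcController controller,
    //         GetServiceStatusRequestProto request,
    //         com.google.protobuf.RpcCallback<GetServiceStatusResponseProto> done) {
    //       done.run(GetServiceStatusResponseProto.newBuilder()
    //           .setState(HAServiceStateProto.ACTIVE)
    //           .build());
    //     }
    //     // monitorHealth, transitionToActive, transitionToStandby: analogous
    //   };
    //   com.google.protobuf.Service service =
    //       HAServiceProtocolService.newReflectiveService(impl);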
    
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new HAServiceProtocolService() {
        @java.lang.Override
        public void monitorHealth(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
          impl.monitorHealth(controller, request, done);
        }
        
        @java.lang.Override
        public void transitionToActive(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
          impl.transitionToActive(controller, request, done);
        }
        
        @java.lang.Override
        public void transitionToStandby(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
          impl.transitionToStandby(controller, request, done);
        }
        
        @java.lang.Override
        public void getServiceStatus(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
          impl.getServiceStatus(controller, request, done);
        }
        
      };
    }
    
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }
        
        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request);
            case 1:
              return impl.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request);
            case 2:
              return impl.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request);
            case 3:
              return impl.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }
        
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }
        
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }
        
      };
    }
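
    // Server-side dispatch sketch (illustrative): the BlockingService
    // returned above routes calls by MethodDescriptor index, so a generic
    // RPC server needs no compile-time knowledge of the methods. Here `impl`
    // and `controller` stand in for framework-supplied objects:
    //
    //   com.google.protobuf.BlockingService svc =
    //       HAServiceProtocolService.newReflectiveBlockingService(impl);
    //   com.google.protobuf.Descriptors.MethodDescriptor m =
    //       HAServiceProtocolService.getDescriptor().getMethods().get(3);
    //   com.google.protobuf.Message reply = svc.callBlockingMethod(
    //       m, controller, svc.getRequestPrototype(m));  // getServiceStatus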
    
    public abstract void monitorHealth(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
    
    public abstract void transitionToActive(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
    
    public abstract void transitionToStandby(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
    
    public abstract void getServiceStatus(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
    
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request,
            com.google.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        case 1:
          this.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request,
            com.google.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        case 2:
          this.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request,
            com.google.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        case 3:
          this.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request,
            com.google.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
    
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
    
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
    
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
    
    public static final class Stub extends org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceProtocolService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
      
      private final com.google.protobuf.RpcChannel channel;
      
      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }
      
      public void monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()));
      }
      
      public void transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()));
      }
      
      public void transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()));
      }
      
      public void getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()));
      }
    }
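
    // Asynchronous client sketch (illustrative): given a
    // com.google.protobuf.RpcChannel `channel` and an RpcController
    // `controller` from the surrounding RPC framework (neither is defined in
    // this file), the stub delivers the response through the callback:
    //
    //   Stub stub = HAServiceProtocolService.newStub(channel);
    //   stub.getServiceStatus(
    //       controller,
    //       GetServiceStatusRequestProto.getDefaultInstance(),
    //       new com.google.protobuf.RpcCallback<GetServiceStatusResponseProto>() {
    //         public void run(GetServiceStatusResponseProto status) {
    //           // inspect status.getState(), status.getReadyToBecomeActive()
    //         }
    //       });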
    
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
    
    public interface BlockingInterface {
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
          throws com.google.protobuf.ServiceException;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
          throws com.google.protobuf.ServiceException;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
          throws com.google.protobuf.ServiceException;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
          throws com.google.protobuf.ServiceException;
    }
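
    // Blocking client sketch (illustrative): with a
    // com.google.protobuf.BlockingRpcChannel `channel` supplied by the RPC
    // framework (not defined in this file), a synchronous status query is:
    //
    //   BlockingInterface proxy =
    //       HAServiceProtocolService.newBlockingStub(channel);
    //   GetServiceStatusResponseProto status = proxy.getServiceStatus(
    //       null,  // RpcController; whether null is accepted is framework-dependent
    //       GetServiceStatusRequestProto.getDefaultInstance());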
    
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }
      
      private final com.google.protobuf.BlockingRpcChannel channel;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance());
      }
      
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance());
      }
      
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance());
      }
      
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance());
      }
      
    }
  }
  
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_HAStateChangeRequestInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_HAStateChangeRequestInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MonitorHealthRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MonitorHealthRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MonitorHealthResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MonitorHealthResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToActiveRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToActiveRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToActiveResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToActiveResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToStandbyRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToStandbyRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToStandbyResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToStandbyResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetServiceStatusRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetServiceStatusRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetServiceStatusResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetServiceStatusResponseProto_fieldAccessorTable;
  
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\027HAServiceProtocol.proto\"D\n\035HAStateChan" +
      "geRequestInfoProto\022#\n\treqSource\030\001 \002(\0162\020." +
      "HARequestSource\"\033\n\031MonitorHealthRequestP" +
      "roto\"\034\n\032MonitorHealthResponseProto\"Q\n\036Tr" +
      "ansitionToActiveRequestProto\022/\n\007reqInfo\030" +
      "\001 \002(\0132\036.HAStateChangeRequestInfoProto\"!\n" +
      "\037TransitionToActiveResponseProto\"R\n\037Tran" +
      "sitionToStandbyRequestProto\022/\n\007reqInfo\030\001" +
      " \002(\0132\036.HAStateChangeRequestInfoProto\"\"\n " +
      "TransitionToStandbyResponseProto\"\036\n\034GetS",
      "erviceStatusRequestProto\"y\n\035GetServiceSt" +
      "atusResponseProto\022#\n\005state\030\001 \002(\0162\024.HASer" +
      "viceStateProto\022\033\n\023readyToBecomeActive\030\002 " +
      "\001(\010\022\026\n\016notReadyReason\030\003 \001(\t*@\n\023HAService" +
      "StateProto\022\020\n\014INITIALIZING\020\000\022\n\n\006ACTIVE\020\001" +
      "\022\013\n\007STANDBY\020\002*W\n\017HARequestSource\022\023\n\017REQU" +
      "EST_BY_USER\020\000\022\032\n\026REQUEST_BY_USER_FORCED\020" +
      "\001\022\023\n\017REQUEST_BY_ZKFC\020\0022\354\002\n\030HAServiceProt" +
      "ocolService\022H\n\rmonitorHealth\022\032.MonitorHe" +
      "althRequestProto\032\033.MonitorHealthResponse",
      "Proto\022W\n\022transitionToActive\022\037.Transition" +
      "ToActiveRequestProto\032 .TransitionToActiv" +
      "eResponseProto\022Z\n\023transitionToStandby\022 ." +
      "TransitionToStandbyRequestProto\032!.Transi" +
      "tionToStandbyResponseProto\022Q\n\020getService" +
      "Status\022\035.GetServiceStatusRequestProto\032\036." +
      "GetServiceStatusResponseProtoB;\n\032org.apa" +
      "che.hadoop.ha.protoB\027HAServiceProtocolPr" +
      "otos\210\001\001\240\001\001"
    };
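    // The concatenated literal above is the serialized FileDescriptorProto
    // for HAServiceProtocol.proto, split across several Java string
    // constants; the assigner below parses it and wires the resulting
    // descriptors into the internal_static_* fields declared earlier.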
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_HAStateChangeRequestInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_HAStateChangeRequestInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_HAStateChangeRequestInfoProto_descriptor,
              new java.lang.String[] { "ReqSource", },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder.class);
          internal_static_MonitorHealthRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_MonitorHealthRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MonitorHealthRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.Builder.class);
          internal_static_MonitorHealthResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_MonitorHealthResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MonitorHealthResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.Builder.class);
          internal_static_TransitionToActiveRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_TransitionToActiveRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToActiveRequestProto_descriptor,
              new java.lang.String[] { "ReqInfo", },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.Builder.class);
          internal_static_TransitionToActiveResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_TransitionToActiveResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToActiveResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.Builder.class);
          internal_static_TransitionToStandbyRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_TransitionToStandbyRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToStandbyRequestProto_descriptor,
              new java.lang.String[] { "ReqInfo", },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.Builder.class);
          internal_static_TransitionToStandbyResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_TransitionToStandbyResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToStandbyResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.Builder.class);
          internal_static_GetServiceStatusRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_GetServiceStatusRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_GetServiceStatusRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.Builder.class);
          internal_static_GetServiceStatusResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_GetServiceStatusResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_GetServiceStatusResponseProto_descriptor,
              new java.lang.String[] { "State", "ReadyToBecomeActive", "NotReadyReason", },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.Builder.class);
          return null;
        }
      };
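    // Parse descriptorData into a live FileDescriptor (this .proto imports
    // nothing, hence the empty dependency array) and run the assigner above
    // so the descriptors and accessor tables are populated before any of the
    // message classes are used.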
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
  
  // @@protoc_insertion_point(outer_class_scope)
}
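
// --- Usage sketch (added for illustration; not emitted by protoc) ---
// A minimal example, assuming only the standard protobuf 2.x generated-message
// API visible above (newBuilder/build/toByteArray/parseFrom). The class name
// StatusRoundTripDemo is hypothetical; it is package-private so it can sit
// alongside the public outer class in this file and package.
class StatusRoundTripDemo {
  public static void main(String[] args) throws Exception {
    // Build a status response reporting an ACTIVE service.
    HAServiceProtocolProtos.GetServiceStatusResponseProto response =
        HAServiceProtocolProtos.GetServiceStatusResponseProto.newBuilder()
            .setState(HAServiceProtocolProtos.HAServiceStateProto.ACTIVE) // required field
            .setReadyToBecomeActive(true)                                 // optional field
            .build();

    // Round-trip through the wire format, as the HA RPC layer would.
    byte[] wire = response.toByteArray();
    HAServiceProtocolProtos.GetServiceStatusResponseProto parsed =
        HAServiceProtocolProtos.GetServiceStatusResponseProto.parseFrom(wire);
    System.out.println(parsed.getState()); // prints ACTIVE
  }
}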