org.apache.hadoop.yarn.proto.YarnServerCommonProtos

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_server_common_protos.proto

package org.apache.hadoop.yarn.proto;

public final class YarnServerCommonProtos {
  private YarnServerCommonProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * Protobuf enum {@code hadoop.yarn.NodeActionProto}
   */
  public enum NodeActionProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * NORMAL = 0;
     */
    NORMAL(0),
    /**
     * RESYNC = 1;
     */
    RESYNC(1),
    /**
     * SHUTDOWN = 2;
     */
    SHUTDOWN(2),
    ;

    /**
     * NORMAL = 0;
     */
    public static final int NORMAL_VALUE = 0;
    /**
     * RESYNC = 1;
     */
    public static final int RESYNC_VALUE = 1;
    /**
     * SHUTDOWN = 2;
     */
    public static final int SHUTDOWN_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static NodeActionProto valueOf(int value) {
      return forNumber(value);
    }

    public static NodeActionProto forNumber(int value) {
      switch (value) {
        case 0: return NORMAL;
        case 1: return RESYNC;
        case 2: return SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeActionProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        NodeActionProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeActionProto>() {
            public NodeActionProto findValueByNumber(int number) {
              return NodeActionProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final NodeActionProto[] VALUES = values();

    public static NodeActionProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeActionProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeActionProto)
  }
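
  // Illustrative usage sketch (not part of the generated source): NodeActionProto
  // round-trips between an enum constant and its wire number via getNumber() and
  // forNumber(int). forNumber(int) returns null for numbers this enum does not know,
  // so callers reading values written by a newer schema should guard against null.
  //
  //   NodeActionProto action = NodeActionProto.forNumber(2);   // SHUTDOWN
  //   int wire = NodeActionProto.RESYNC.getNumber();           // 1
  //   NodeActionProto unknown = NodeActionProto.forNumber(99); // null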

  public interface NodeStatusProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeStatusProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.NodeIdProto node_id = 1;
     */
    boolean hasNodeId();
    /**
     * optional .hadoop.yarn.NodeIdProto node_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * optional .hadoop.yarn.NodeIdProto node_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * optional int32 response_id = 2;
     */
    boolean hasResponseId();
    /**
     * optional int32 response_id = 2;
     */
    int getResponseId();

    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto>
        getContainersStatusesList();
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getContainersStatuses(int index);
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    int getContainersStatusesCount();
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>
        getContainersStatusesOrBuilderList();
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getContainersStatusesOrBuilder(
        int index);

    /**
     * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
     */
    boolean hasNodeHealthStatus();
    /**
     * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
     */
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto getNodeHealthStatus();
    /**
     * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
     */
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder getNodeHealthStatusOrBuilder();

    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto>
        getKeepAliveApplicationsList();
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getKeepAliveApplications(int index);
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    int getKeepAliveApplicationsCount();
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
        getKeepAliveApplicationsOrBuilderList();
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getKeepAliveApplicationsOrBuilder(
        int index);

    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
     */
    boolean hasContainersUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder();

    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
     */
    boolean hasNodeUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder();

    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>
        getIncreasedContainersList();
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getIncreasedContainers(int index);
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    int getIncreasedContainersCount();
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
        getIncreasedContainersOrBuilderList();
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getIncreasedContainersOrBuilder(
        int index);

    /**
     * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
     */
    boolean hasOpportunisticContainersStatus();
    /**
     * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
     */
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto getOpportunisticContainersStatus();
    /**
     * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
     */
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder getOpportunisticContainersStatusOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeStatusProto}
   */
  public  static final class NodeStatusProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeStatusProto)
      NodeStatusProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use NodeStatusProto.newBuilder() to construct.
    private NodeStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeStatusProto() {
      containersStatuses_ = java.util.Collections.emptyList();
      keepAliveApplications_ = java.util.Collections.emptyList();
      increasedContainers_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private NodeStatusProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = nodeId_.toBuilder();
              }
              nodeId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(nodeId_);
                nodeId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              responseId_ = input.readInt32();
              break;
            }
            case 26: {
              if (!((mutable_bitField0_ & 0x00000004) != 0)) {
                containersStatuses_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto>();
                mutable_bitField0_ |= 0x00000004;
              }
              containersStatuses_.add(
                  input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.PARSER, extensionRegistry));
              break;
            }
            case 34: {
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) != 0)) {
                subBuilder = nodeHealthStatus_.toBuilder();
              }
              nodeHealthStatus_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(nodeHealthStatus_);
                nodeHealthStatus_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 42: {
              if (!((mutable_bitField0_ & 0x00000010) != 0)) {
                keepAliveApplications_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto>();
                mutable_bitField0_ |= 0x00000010;
              }
              keepAliveApplications_.add(
                  input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry));
              break;
            }
            case 50: {
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) != 0)) {
                subBuilder = containersUtilization_.toBuilder();
              }
              containersUtilization_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(containersUtilization_);
                containersUtilization_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 58: {
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) != 0)) {
                subBuilder = nodeUtilization_.toBuilder();
              }
              nodeUtilization_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(nodeUtilization_);
                nodeUtilization_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 66: {
              if (!((mutable_bitField0_ & 0x00000080) != 0)) {
                increasedContainers_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>();
                mutable_bitField0_ |= 0x00000080;
              }
              increasedContainers_.add(
                  input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER, extensionRegistry));
              break;
            }
            case 74: {
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000020) != 0)) {
                subBuilder = opportunisticContainersStatus_.toBuilder();
              }
              opportunisticContainersStatus_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(opportunisticContainersStatus_);
                opportunisticContainersStatus_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000020;
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000004) != 0)) {
          containersStatuses_ = java.util.Collections.unmodifiableList(containersStatuses_);
        }
        if (((mutable_bitField0_ & 0x00000010) != 0)) {
          keepAliveApplications_ = java.util.Collections.unmodifiableList(keepAliveApplications_);
        }
        if (((mutable_bitField0_ & 0x00000080) != 0)) {
          increasedContainers_ = java.util.Collections.unmodifiableList(increasedContainers_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeStatusProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeStatusProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODE_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * optional .hadoop.yarn.NodeIdProto node_id = 1;
     */
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.NodeIdProto node_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * optional .hadoop.yarn.NodeIdProto node_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int RESPONSE_ID_FIELD_NUMBER = 2;
    private int responseId_;
    /**
     * optional int32 response_id = 2;
     */
    public boolean hasResponseId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int32 response_id = 2;
     */
    public int getResponseId() {
      return responseId_;
    }

    public static final int CONTAINERSSTATUSES_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> containersStatuses_;
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> getContainersStatusesList() {
      return containersStatuses_;
    }
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>
        getContainersStatusesOrBuilderList() {
      return containersStatuses_;
    }
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    public int getContainersStatusesCount() {
      return containersStatuses_.size();
    }
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getContainersStatuses(int index) {
      return containersStatuses_.get(index);
    }
    /**
     * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getContainersStatusesOrBuilder(
        int index) {
      return containersStatuses_.get(index);
    }

    public static final int NODEHEALTHSTATUS_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto nodeHealthStatus_;
    /**
     * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
     */
    public boolean hasNodeHealthStatus() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto getNodeHealthStatus() {
      return nodeHealthStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance() : nodeHealthStatus_;
    }
    /**
     * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder getNodeHealthStatusOrBuilder() {
      return nodeHealthStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance() : nodeHealthStatus_;
    }

    public static final int KEEP_ALIVE_APPLICATIONS_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> keepAliveApplications_;
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> getKeepAliveApplicationsList() {
      return keepAliveApplications_;
    }
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
        getKeepAliveApplicationsOrBuilderList() {
      return keepAliveApplications_;
    }
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    public int getKeepAliveApplicationsCount() {
      return keepAliveApplications_.size();
    }
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getKeepAliveApplications(int index) {
      return keepAliveApplications_.get(index);
    }
    /**
     * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getKeepAliveApplicationsOrBuilder(
        int index) {
      return keepAliveApplications_.get(index);
    }

    public static final int CONTAINERS_UTILIZATION_FIELD_NUMBER = 6;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto containersUtilization_;
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
     */
    public boolean hasContainersUtilization() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization() {
      return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
    }
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder() {
      return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
    }

    public static final int NODE_UTILIZATION_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto nodeUtilization_;
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
     */
    public boolean hasNodeUtilization() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization() {
      return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
    }
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder() {
      return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
    }

    public static final int INCREASED_CONTAINERS_FIELD_NUMBER = 8;
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> increasedContainers_;
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getIncreasedContainersList() {
      return increasedContainers_;
    }
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
        getIncreasedContainersOrBuilderList() {
      return increasedContainers_;
    }
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    public int getIncreasedContainersCount() {
      return increasedContainers_.size();
    }
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getIncreasedContainers(int index) {
      return increasedContainers_.get(index);
    }
    /**
     * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getIncreasedContainersOrBuilder(
        int index) {
      return increasedContainers_.get(index);
    }

    public static final int OPPORTUNISTIC_CONTAINERS_STATUS_FIELD_NUMBER = 9;
    private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto opportunisticContainersStatus_;
    /**
     * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
     */
    public boolean hasOpportunisticContainersStatus() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
     */
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto getOpportunisticContainersStatus() {
      return opportunisticContainersStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance() : opportunisticContainersStatus_;
    }
    /**
     * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
     */
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder getOpportunisticContainersStatusOrBuilder() {
      return opportunisticContainersStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance() : opportunisticContainersStatus_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getContainersStatusesCount(); i++) {
        if (!getContainersStatuses(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasContainersUtilization()) {
        if (!getContainersUtilization().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasNodeUtilization()) {
        if (!getNodeUtilization().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getIncreasedContainersCount(); i++) {
        if (!getIncreasedContainers(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, responseId_);
      }
      for (int i = 0; i < containersStatuses_.size(); i++) {
        output.writeMessage(3, containersStatuses_.get(i));
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(4, getNodeHealthStatus());
      }
      for (int i = 0; i < keepAliveApplications_.size(); i++) {
        output.writeMessage(5, keepAliveApplications_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(6, getContainersUtilization());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(7, getNodeUtilization());
      }
      for (int i = 0; i < increasedContainers_.size(); i++) {
        output.writeMessage(8, increasedContainers_.get(i));
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeMessage(9, getOpportunisticContainersStatus());
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, responseId_);
      }
      for (int i = 0; i < containersStatuses_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, containersStatuses_.get(i));
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getNodeHealthStatus());
      }
      for (int i = 0; i < keepAliveApplications_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, keepAliveApplications_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, getContainersUtilization());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getNodeUtilization());
      }
      for (int i = 0; i < increasedContainers_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(8, increasedContainers_.get(i));
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(9, getOpportunisticContainersStatus());
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto) obj;

      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (hasResponseId() != other.hasResponseId()) return false;
      if (hasResponseId()) {
        if (getResponseId()
            != other.getResponseId()) return false;
      }
      if (!getContainersStatusesList()
          .equals(other.getContainersStatusesList())) return false;
      if (hasNodeHealthStatus() != other.hasNodeHealthStatus()) return false;
      if (hasNodeHealthStatus()) {
        if (!getNodeHealthStatus()
            .equals(other.getNodeHealthStatus())) return false;
      }
      if (!getKeepAliveApplicationsList()
          .equals(other.getKeepAliveApplicationsList())) return false;
      if (hasContainersUtilization() != other.hasContainersUtilization()) return false;
      if (hasContainersUtilization()) {
        if (!getContainersUtilization()
            .equals(other.getContainersUtilization())) return false;
      }
      if (hasNodeUtilization() != other.hasNodeUtilization()) return false;
      if (hasNodeUtilization()) {
        if (!getNodeUtilization()
            .equals(other.getNodeUtilization())) return false;
      }
      if (!getIncreasedContainersList()
          .equals(other.getIncreasedContainersList())) return false;
      if (hasOpportunisticContainersStatus() != other.hasOpportunisticContainersStatus()) return false;
      if (hasOpportunisticContainersStatus()) {
        if (!getOpportunisticContainersStatus()
            .equals(other.getOpportunisticContainersStatus())) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeId()) {
        hash = (37 * hash) + NODE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (hasResponseId()) {
        hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getResponseId();
      }
      if (getContainersStatusesCount() > 0) {
        hash = (37 * hash) + CONTAINERSSTATUSES_FIELD_NUMBER;
        hash = (53 * hash) + getContainersStatusesList().hashCode();
      }
      if (hasNodeHealthStatus()) {
        hash = (37 * hash) + NODEHEALTHSTATUS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeHealthStatus().hashCode();
      }
      if (getKeepAliveApplicationsCount() > 0) {
        hash = (37 * hash) + KEEP_ALIVE_APPLICATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getKeepAliveApplicationsList().hashCode();
      }
      if (hasContainersUtilization()) {
        hash = (37 * hash) + CONTAINERS_UTILIZATION_FIELD_NUMBER;
        hash = (53 * hash) + getContainersUtilization().hashCode();
      }
      if (hasNodeUtilization()) {
        hash = (37 * hash) + NODE_UTILIZATION_FIELD_NUMBER;
        hash = (53 * hash) + getNodeUtilization().hashCode();
      }
      if (getIncreasedContainersCount() > 0) {
        hash = (37 * hash) + INCREASED_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getIncreasedContainersList().hashCode();
      }
      if (hasOpportunisticContainersStatus()) {
        hash = (37 * hash) + OPPORTUNISTIC_CONTAINERS_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getOpportunisticContainersStatus().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
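
    // Illustrative usage sketch (not part of the generated source): the parseFrom
    // overloads above pair with the message's serialization methods for a simple
    // byte-level round trip. The field value below is an arbitrary example.
    //
    //   NodeStatusProto status = NodeStatusProto.newBuilder()
    //       .setResponseId(42)
    //       .build();
    //   byte[] bytes = status.toByteArray();
    //   NodeStatusProto parsed = NodeStatusProto.parseFrom(bytes);
    //   assert parsed.getResponseId() == 42;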

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeStatusProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeStatusProto)
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeStatusProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeStatusProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getNodeIdFieldBuilder();
          getContainersStatusesFieldBuilder();
          getNodeHealthStatusFieldBuilder();
          getKeepAliveApplicationsFieldBuilder();
          getContainersUtilizationFieldBuilder();
          getNodeUtilizationFieldBuilder();
          getIncreasedContainersFieldBuilder();
          getOpportunisticContainersStatusFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (nodeIdBuilder_ == null) {
          nodeId_ = null;
        } else {
          nodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        responseId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (containersStatusesBuilder_ == null) {
          containersStatuses_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          containersStatusesBuilder_.clear();
        }
        if (nodeHealthStatusBuilder_ == null) {
          nodeHealthStatus_ = null;
        } else {
          nodeHealthStatusBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        if (keepAliveApplicationsBuilder_ == null) {
          keepAliveApplications_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          keepAliveApplicationsBuilder_.clear();
        }
        if (containersUtilizationBuilder_ == null) {
          containersUtilization_ = null;
        } else {
          containersUtilizationBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        if (nodeUtilizationBuilder_ == null) {
          nodeUtilization_ = null;
        } else {
          nodeUtilizationBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        if (increasedContainersBuilder_ == null) {
          increasedContainers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
        } else {
          increasedContainersBuilder_.clear();
        }
        if (opportunisticContainersStatusBuilder_ == null) {
          opportunisticContainersStatus_ = null;
        } else {
          opportunisticContainersStatusBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeStatusProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto build() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (nodeIdBuilder_ == null) {
            result.nodeId_ = nodeId_;
          } else {
            result.nodeId_ = nodeIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.responseId_ = responseId_;
          to_bitField0_ |= 0x00000002;
        }
        if (containersStatusesBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            containersStatuses_ = java.util.Collections.unmodifiableList(containersStatuses_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.containersStatuses_ = containersStatuses_;
        } else {
          result.containersStatuses_ = containersStatusesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          if (nodeHealthStatusBuilder_ == null) {
            result.nodeHealthStatus_ = nodeHealthStatus_;
          } else {
            result.nodeHealthStatus_ = nodeHealthStatusBuilder_.build();
          }
          to_bitField0_ |= 0x00000004;
        }
        if (keepAliveApplicationsBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0)) {
            keepAliveApplications_ = java.util.Collections.unmodifiableList(keepAliveApplications_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.keepAliveApplications_ = keepAliveApplications_;
        } else {
          result.keepAliveApplications_ = keepAliveApplicationsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          if (containersUtilizationBuilder_ == null) {
            result.containersUtilization_ = containersUtilization_;
          } else {
            result.containersUtilization_ = containersUtilizationBuilder_.build();
          }
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          if (nodeUtilizationBuilder_ == null) {
            result.nodeUtilization_ = nodeUtilization_;
          } else {
            result.nodeUtilization_ = nodeUtilizationBuilder_.build();
          }
          to_bitField0_ |= 0x00000010;
        }
        if (increasedContainersBuilder_ == null) {
          if (((bitField0_ & 0x00000080) != 0)) {
            increasedContainers_ = java.util.Collections.unmodifiableList(increasedContainers_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.increasedContainers_ = increasedContainers_;
        } else {
          result.increasedContainers_ = increasedContainersBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          if (opportunisticContainersStatusBuilder_ == null) {
            result.opportunisticContainersStatus_ = opportunisticContainersStatus_;
          } else {
            result.opportunisticContainersStatus_ = opportunisticContainersStatusBuilder_.build();
          }
          to_bitField0_ |= 0x00000020;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
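
      // Illustrative usage sketch (not part of the generated source): a typical
      // builder flow. build() delegates to buildPartial() and additionally checks
      // isInitialized(), throwing if a nested message is missing required fields;
      // buildPartial() skips that check. someNodeId below is an assumed, pre-built
      // NodeIdProto instance.
      //
      //   NodeStatusProto status = NodeStatusProto.newBuilder()
      //       .setNodeId(someNodeId)
      //       .setResponseId(1)
      //       .build();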

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance()) return this;
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (other.hasResponseId()) {
          setResponseId(other.getResponseId());
        }
        if (containersStatusesBuilder_ == null) {
          if (!other.containersStatuses_.isEmpty()) {
            if (containersStatuses_.isEmpty()) {
              containersStatuses_ = other.containersStatuses_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureContainersStatusesIsMutable();
              containersStatuses_.addAll(other.containersStatuses_);
            }
            onChanged();
          }
        } else {
          if (!other.containersStatuses_.isEmpty()) {
            if (containersStatusesBuilder_.isEmpty()) {
              containersStatusesBuilder_.dispose();
              containersStatusesBuilder_ = null;
              containersStatuses_ = other.containersStatuses_;
              bitField0_ = (bitField0_ & ~0x00000004);
              containersStatusesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainersStatusesFieldBuilder() : null;
            } else {
              containersStatusesBuilder_.addAllMessages(other.containersStatuses_);
            }
          }
        }
        if (other.hasNodeHealthStatus()) {
          mergeNodeHealthStatus(other.getNodeHealthStatus());
        }
        if (keepAliveApplicationsBuilder_ == null) {
          if (!other.keepAliveApplications_.isEmpty()) {
            if (keepAliveApplications_.isEmpty()) {
              keepAliveApplications_ = other.keepAliveApplications_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureKeepAliveApplicationsIsMutable();
              keepAliveApplications_.addAll(other.keepAliveApplications_);
            }
            onChanged();
          }
        } else {
          if (!other.keepAliveApplications_.isEmpty()) {
            if (keepAliveApplicationsBuilder_.isEmpty()) {
              keepAliveApplicationsBuilder_.dispose();
              keepAliveApplicationsBuilder_ = null;
              keepAliveApplications_ = other.keepAliveApplications_;
              bitField0_ = (bitField0_ & ~0x00000010);
              keepAliveApplicationsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getKeepAliveApplicationsFieldBuilder() : null;
            } else {
              keepAliveApplicationsBuilder_.addAllMessages(other.keepAliveApplications_);
            }
          }
        }
        if (other.hasContainersUtilization()) {
          mergeContainersUtilization(other.getContainersUtilization());
        }
        if (other.hasNodeUtilization()) {
          mergeNodeUtilization(other.getNodeUtilization());
        }
        if (increasedContainersBuilder_ == null) {
          if (!other.increasedContainers_.isEmpty()) {
            if (increasedContainers_.isEmpty()) {
              increasedContainers_ = other.increasedContainers_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureIncreasedContainersIsMutable();
              increasedContainers_.addAll(other.increasedContainers_);
            }
            onChanged();
          }
        } else {
          if (!other.increasedContainers_.isEmpty()) {
            if (increasedContainersBuilder_.isEmpty()) {
              increasedContainersBuilder_.dispose();
              increasedContainersBuilder_ = null;
              increasedContainers_ = other.increasedContainers_;
              bitField0_ = (bitField0_ & ~0x00000080);
              increasedContainersBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getIncreasedContainersFieldBuilder() : null;
            } else {
              increasedContainersBuilder_.addAllMessages(other.increasedContainers_);
            }
          }
        }
        if (other.hasOpportunisticContainersStatus()) {
          mergeOpportunisticContainersStatus(other.getOpportunisticContainersStatus());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getContainersStatusesCount(); i++) {
          if (!getContainersStatuses(i).isInitialized()) {
            return false;
          }
        }
        if (hasContainersUtilization()) {
          if (!getContainersUtilization().isInitialized()) {
            return false;
          }
        }
        if (hasNodeUtilization()) {
          if (!getNodeUtilization().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getIncreasedContainersCount(); i++) {
          if (!getIncreasedContainers(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
          onChanged();
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
          onChanged();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              nodeId_ != null &&
              nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            nodeId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder(nodeId_).mergeFrom(value).buildPartial();
          } else {
            nodeId_ = value;
          }
          onChanged();
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public Builder clearNodeId() {
        if (nodeIdBuilder_ == null) {
          nodeId_ = null;
          onChanged();
        } else {
          nodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * optional .hadoop.yarn.NodeIdProto node_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }
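      // Note (descriptive, not generated): getNodeIdFieldBuilder() shows the lazy
      // pattern repeated for every message-typed field in this Builder: the plain
      // field (nodeId_) is kept until a nested builder is first requested, at which
      // point ownership moves to a SingleFieldBuilderV3 and the plain field is nulled.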

      private int responseId_ ;
      /**
       * optional int32 response_id = 2;
       */
      public boolean hasResponseId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int32 response_id = 2;
       */
      public int getResponseId() {
        return responseId_;
      }
      /**
       * optional int32 response_id = 2;
       */
      public Builder setResponseId(int value) {
        bitField0_ |= 0x00000002;
        responseId_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 response_id = 2;
       */
      public Builder clearResponseId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        responseId_ = 0;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> containersStatuses_ =
        java.util.Collections.emptyList();
      private void ensureContainersStatusesIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          containersStatuses_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto>(containersStatuses_);
          bitField0_ |= 0x00000004;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> containersStatusesBuilder_;

      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> getContainersStatusesList() {
        if (containersStatusesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containersStatuses_);
        } else {
          return containersStatusesBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public int getContainersStatusesCount() {
        if (containersStatusesBuilder_ == null) {
          return containersStatuses_.size();
        } else {
          return containersStatusesBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getContainersStatuses(int index) {
        if (containersStatusesBuilder_ == null) {
          return containersStatuses_.get(index);
        } else {
          return containersStatusesBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder setContainersStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (containersStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersStatusesIsMutable();
          containersStatuses_.set(index, value);
          onChanged();
        } else {
          containersStatusesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder setContainersStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (containersStatusesBuilder_ == null) {
          ensureContainersStatusesIsMutable();
          containersStatuses_.set(index, builderForValue.build());
          onChanged();
        } else {
          containersStatusesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder addContainersStatuses(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (containersStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersStatusesIsMutable();
          containersStatuses_.add(value);
          onChanged();
        } else {
          containersStatusesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder addContainersStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) {
        if (containersStatusesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainersStatusesIsMutable();
          containersStatuses_.add(index, value);
          onChanged();
        } else {
          containersStatusesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder addContainersStatuses(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (containersStatusesBuilder_ == null) {
          ensureContainersStatusesIsMutable();
          containersStatuses_.add(builderForValue.build());
          onChanged();
        } else {
          containersStatusesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder addContainersStatuses(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) {
        if (containersStatusesBuilder_ == null) {
          ensureContainersStatusesIsMutable();
          containersStatuses_.add(index, builderForValue.build());
          onChanged();
        } else {
          containersStatusesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder addAllContainersStatuses(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto> values) {
        if (containersStatusesBuilder_ == null) {
          ensureContainersStatusesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containersStatuses_);
          onChanged();
        } else {
          containersStatusesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder clearContainersStatuses() {
        if (containersStatusesBuilder_ == null) {
          containersStatuses_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          containersStatusesBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public Builder removeContainersStatuses(int index) {
        if (containersStatusesBuilder_ == null) {
          ensureContainersStatusesIsMutable();
          containersStatuses_.remove(index);
          onChanged();
        } else {
          containersStatusesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder getContainersStatusesBuilder(
          int index) {
        return getContainersStatusesFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getContainersStatusesOrBuilder(
          int index) {
        if (containersStatusesBuilder_ == null) {
          return containersStatuses_.get(index);  } else {
          return containersStatusesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>
           getContainersStatusesOrBuilderList() {
        if (containersStatusesBuilder_ != null) {
          return containersStatusesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containersStatuses_);
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addContainersStatusesBuilder() {
        return getContainersStatusesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addContainersStatusesBuilder(
          int index) {
        return getContainersStatusesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ContainerStatusProto containersStatuses = 3;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder>
           getContainersStatusesBuilderList() {
        return getContainersStatusesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> 
          getContainersStatusesFieldBuilder() {
        if (containersStatusesBuilder_ == null) {
          containersStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>(
                  containersStatuses_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          containersStatuses_ = null;
        }
        return containersStatusesBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto nodeHealthStatus_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder> nodeHealthStatusBuilder_;
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public boolean hasNodeHealthStatus() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto getNodeHealthStatus() {
        if (nodeHealthStatusBuilder_ == null) {
          return nodeHealthStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance() : nodeHealthStatus_;
        } else {
          return nodeHealthStatusBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public Builder setNodeHealthStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto value) {
        if (nodeHealthStatusBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeHealthStatus_ = value;
          onChanged();
        } else {
          nodeHealthStatusBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public Builder setNodeHealthStatus(
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder builderForValue) {
        if (nodeHealthStatusBuilder_ == null) {
          nodeHealthStatus_ = builderForValue.build();
          onChanged();
        } else {
          nodeHealthStatusBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public Builder mergeNodeHealthStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto value) {
        if (nodeHealthStatusBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
              nodeHealthStatus_ != null &&
              nodeHealthStatus_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance()) {
            nodeHealthStatus_ =
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.newBuilder(nodeHealthStatus_).mergeFrom(value).buildPartial();
          } else {
            nodeHealthStatus_ = value;
          }
          onChanged();
        } else {
          nodeHealthStatusBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public Builder clearNodeHealthStatus() {
        if (nodeHealthStatusBuilder_ == null) {
          nodeHealthStatus_ = null;
          onChanged();
        } else {
          nodeHealthStatusBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder getNodeHealthStatusBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getNodeHealthStatusFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder getNodeHealthStatusOrBuilder() {
        if (nodeHealthStatusBuilder_ != null) {
          return nodeHealthStatusBuilder_.getMessageOrBuilder();
        } else {
          return nodeHealthStatus_ == null ?
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance() : nodeHealthStatus_;
        }
      }
      /**
       * optional .hadoop.yarn.NodeHealthStatusProto nodeHealthStatus = 4;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder> 
          getNodeHealthStatusFieldBuilder() {
        if (nodeHealthStatusBuilder_ == null) {
          nodeHealthStatusBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder>(
                  getNodeHealthStatus(),
                  getParentForChildren(),
                  isClean());
          nodeHealthStatus_ = null;
        }
        return nodeHealthStatusBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> keepAliveApplications_ =
        java.util.Collections.emptyList();
      private void ensureKeepAliveApplicationsIsMutable() {
        if (!((bitField0_ & 0x00000010) != 0)) {
          keepAliveApplications_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto>(keepAliveApplications_);
          bitField0_ |= 0x00000010;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> keepAliveApplicationsBuilder_;

      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> getKeepAliveApplicationsList() {
        if (keepAliveApplicationsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(keepAliveApplications_);
        } else {
          return keepAliveApplicationsBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public int getKeepAliveApplicationsCount() {
        if (keepAliveApplicationsBuilder_ == null) {
          return keepAliveApplications_.size();
        } else {
          return keepAliveApplicationsBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getKeepAliveApplications(int index) {
        if (keepAliveApplicationsBuilder_ == null) {
          return keepAliveApplications_.get(index);
        } else {
          return keepAliveApplicationsBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder setKeepAliveApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (keepAliveApplicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.set(index, value);
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder setKeepAliveApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (keepAliveApplicationsBuilder_ == null) {
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.set(index, builderForValue.build());
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder addKeepAliveApplications(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (keepAliveApplicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.add(value);
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder addKeepAliveApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (keepAliveApplicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.add(index, value);
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder addKeepAliveApplications(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (keepAliveApplicationsBuilder_ == null) {
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.add(builderForValue.build());
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder addKeepAliveApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (keepAliveApplicationsBuilder_ == null) {
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.add(index, builderForValue.build());
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder addAllKeepAliveApplications(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> values) {
        if (keepAliveApplicationsBuilder_ == null) {
          ensureKeepAliveApplicationsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, keepAliveApplications_);
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder clearKeepAliveApplications() {
        if (keepAliveApplicationsBuilder_ == null) {
          keepAliveApplications_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public Builder removeKeepAliveApplications(int index) {
        if (keepAliveApplicationsBuilder_ == null) {
          ensureKeepAliveApplicationsIsMutable();
          keepAliveApplications_.remove(index);
          onChanged();
        } else {
          keepAliveApplicationsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getKeepAliveApplicationsBuilder(
          int index) {
        return getKeepAliveApplicationsFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getKeepAliveApplicationsOrBuilder(
          int index) {
        if (keepAliveApplicationsBuilder_ == null) {
          return keepAliveApplications_.get(index);  } else {
          return keepAliveApplicationsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
           getKeepAliveApplicationsOrBuilderList() {
        if (keepAliveApplicationsBuilder_ != null) {
          return keepAliveApplicationsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(keepAliveApplications_);
        }
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder addKeepAliveApplicationsBuilder() {
        return getKeepAliveApplicationsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder addKeepAliveApplicationsBuilder(
          int index) {
        return getKeepAliveApplicationsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ApplicationIdProto keep_alive_applications = 5;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder>
           getKeepAliveApplicationsBuilderList() {
        return getKeepAliveApplicationsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getKeepAliveApplicationsFieldBuilder() {
        if (keepAliveApplicationsBuilder_ == null) {
          keepAliveApplicationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  keepAliveApplications_,
                  ((bitField0_ & 0x00000010) != 0),
                  getParentForChildren(),
                  isClean());
          keepAliveApplications_ = null;
        }
        return keepAliveApplicationsBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto containersUtilization_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> containersUtilizationBuilder_;
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public boolean hasContainersUtilization() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization() {
        if (containersUtilizationBuilder_ == null) {
          return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
        } else {
          return containersUtilizationBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public Builder setContainersUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (containersUtilizationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containersUtilization_ = value;
          onChanged();
        } else {
          containersUtilizationBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public Builder setContainersUtilization(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder builderForValue) {
        if (containersUtilizationBuilder_ == null) {
          containersUtilization_ = builderForValue.build();
          onChanged();
        } else {
          containersUtilizationBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public Builder mergeContainersUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (containersUtilizationBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0) &&
              containersUtilization_ != null &&
              containersUtilization_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) {
            containersUtilization_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.newBuilder(containersUtilization_).mergeFrom(value).buildPartial();
          } else {
            containersUtilization_ = value;
          }
          onChanged();
        } else {
          containersUtilizationBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public Builder clearContainersUtilization() {
        if (containersUtilizationBuilder_ == null) {
          containersUtilization_ = null;
          onChanged();
        } else {
          containersUtilizationBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder getContainersUtilizationBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getContainersUtilizationFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder() {
        if (containersUtilizationBuilder_ != null) {
          return containersUtilizationBuilder_.getMessageOrBuilder();
        } else {
          return containersUtilization_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
        }
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 6;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> 
          getContainersUtilizationFieldBuilder() {
        if (containersUtilizationBuilder_ == null) {
          containersUtilizationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder>(
                  getContainersUtilization(),
                  getParentForChildren(),
                  isClean());
          containersUtilization_ = null;
        }
        return containersUtilizationBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto nodeUtilization_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> nodeUtilizationBuilder_;
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public boolean hasNodeUtilization() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization() {
        if (nodeUtilizationBuilder_ == null) {
          return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
        } else {
          return nodeUtilizationBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public Builder setNodeUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (nodeUtilizationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeUtilization_ = value;
          onChanged();
        } else {
          nodeUtilizationBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public Builder setNodeUtilization(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder builderForValue) {
        if (nodeUtilizationBuilder_ == null) {
          nodeUtilization_ = builderForValue.build();
          onChanged();
        } else {
          nodeUtilizationBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public Builder mergeNodeUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (nodeUtilizationBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
              nodeUtilization_ != null &&
              nodeUtilization_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) {
            nodeUtilization_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.newBuilder(nodeUtilization_).mergeFrom(value).buildPartial();
          } else {
            nodeUtilization_ = value;
          }
          onChanged();
        } else {
          nodeUtilizationBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public Builder clearNodeUtilization() {
        if (nodeUtilizationBuilder_ == null) {
          nodeUtilization_ = null;
          onChanged();
        } else {
          nodeUtilizationBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder getNodeUtilizationBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getNodeUtilizationFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder() {
        if (nodeUtilizationBuilder_ != null) {
          return nodeUtilizationBuilder_.getMessageOrBuilder();
        } else {
          return nodeUtilization_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
        }
      }
      /**
       * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 7;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> 
          getNodeUtilizationFieldBuilder() {
        if (nodeUtilizationBuilder_ == null) {
          nodeUtilizationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder>(
                  getNodeUtilization(),
                  getParentForChildren(),
                  isClean());
          nodeUtilization_ = null;
        }
        return nodeUtilizationBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> increasedContainers_ =
        java.util.Collections.emptyList();
      private void ensureIncreasedContainersIsMutable() {
        if (!((bitField0_ & 0x00000080) != 0)) {
          increasedContainers_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>(increasedContainers_);
          bitField0_ |= 0x00000080;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> increasedContainersBuilder_;

      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getIncreasedContainersList() {
        if (increasedContainersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(increasedContainers_);
        } else {
          return increasedContainersBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public int getIncreasedContainersCount() {
        if (increasedContainersBuilder_ == null) {
          return increasedContainers_.size();
        } else {
          return increasedContainersBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getIncreasedContainers(int index) {
        if (increasedContainersBuilder_ == null) {
          return increasedContainers_.get(index);
        } else {
          return increasedContainersBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder setIncreasedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (increasedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureIncreasedContainersIsMutable();
          increasedContainers_.set(index, value);
          onChanged();
        } else {
          increasedContainersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder setIncreasedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (increasedContainersBuilder_ == null) {
          ensureIncreasedContainersIsMutable();
          increasedContainers_.set(index, builderForValue.build());
          onChanged();
        } else {
          increasedContainersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder addIncreasedContainers(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (increasedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureIncreasedContainersIsMutable();
          increasedContainers_.add(value);
          onChanged();
        } else {
          increasedContainersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder addIncreasedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (increasedContainersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureIncreasedContainersIsMutable();
          increasedContainers_.add(index, value);
          onChanged();
        } else {
          increasedContainersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder addIncreasedContainers(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (increasedContainersBuilder_ == null) {
          ensureIncreasedContainersIsMutable();
          increasedContainers_.add(builderForValue.build());
          onChanged();
        } else {
          increasedContainersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder addIncreasedContainers(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (increasedContainersBuilder_ == null) {
          ensureIncreasedContainersIsMutable();
          increasedContainers_.add(index, builderForValue.build());
          onChanged();
        } else {
          increasedContainersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder addAllIncreasedContainers(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
        if (increasedContainersBuilder_ == null) {
          ensureIncreasedContainersIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, increasedContainers_);
          onChanged();
        } else {
          increasedContainersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder clearIncreasedContainers() {
        if (increasedContainersBuilder_ == null) {
          increasedContainers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
          onChanged();
        } else {
          increasedContainersBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public Builder removeIncreasedContainers(int index) {
        if (increasedContainersBuilder_ == null) {
          ensureIncreasedContainersIsMutable();
          increasedContainers_.remove(index);
          onChanged();
        } else {
          increasedContainersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getIncreasedContainersBuilder(
          int index) {
        return getIncreasedContainersFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getIncreasedContainersOrBuilder(
          int index) {
        if (increasedContainersBuilder_ == null) {
          return increasedContainers_.get(index);  } else {
          return increasedContainersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
           getIncreasedContainersOrBuilderList() {
        if (increasedContainersBuilder_ != null) {
          return increasedContainersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(increasedContainers_);
        }
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addIncreasedContainersBuilder() {
        return getIncreasedContainersFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addIncreasedContainersBuilder(
          int index) {
        return getIncreasedContainersFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ContainerProto increased_containers = 8;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder>
           getIncreasedContainersBuilderList() {
        return getIncreasedContainersFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> 
          getIncreasedContainersFieldBuilder() {
        if (increasedContainersBuilder_ == null) {
          increasedContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
                  increasedContainers_,
                  ((bitField0_ & 0x00000080) != 0),
                  getParentForChildren(),
                  isClean());
          increasedContainers_ = null;
        }
        return increasedContainersBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto opportunisticContainersStatus_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder> opportunisticContainersStatusBuilder_;
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public boolean hasOpportunisticContainersStatus() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto getOpportunisticContainersStatus() {
        if (opportunisticContainersStatusBuilder_ == null) {
          return opportunisticContainersStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance() : opportunisticContainersStatus_;
        } else {
          return opportunisticContainersStatusBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public Builder setOpportunisticContainersStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto value) {
        if (opportunisticContainersStatusBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          opportunisticContainersStatus_ = value;
          onChanged();
        } else {
          opportunisticContainersStatusBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public Builder setOpportunisticContainersStatus(
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder builderForValue) {
        if (opportunisticContainersStatusBuilder_ == null) {
          opportunisticContainersStatus_ = builderForValue.build();
          onChanged();
        } else {
          opportunisticContainersStatusBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public Builder mergeOpportunisticContainersStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto value) {
        if (opportunisticContainersStatusBuilder_ == null) {
          if (((bitField0_ & 0x00000100) != 0) &&
              opportunisticContainersStatus_ != null &&
              opportunisticContainersStatus_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance()) {
            opportunisticContainersStatus_ =
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.newBuilder(opportunisticContainersStatus_).mergeFrom(value).buildPartial();
          } else {
            opportunisticContainersStatus_ = value;
          }
          onChanged();
        } else {
          opportunisticContainersStatusBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public Builder clearOpportunisticContainersStatus() {
        if (opportunisticContainersStatusBuilder_ == null) {
          opportunisticContainersStatus_ = null;
          onChanged();
        } else {
          opportunisticContainersStatusBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder getOpportunisticContainersStatusBuilder() {
        bitField0_ |= 0x00000100;
        onChanged();
        return getOpportunisticContainersStatusFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder getOpportunisticContainersStatusOrBuilder() {
        if (opportunisticContainersStatusBuilder_ != null) {
          return opportunisticContainersStatusBuilder_.getMessageOrBuilder();
        } else {
          return opportunisticContainersStatus_ == null ?
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance() : opportunisticContainersStatus_;
        }
      }
      /**
       * optional .hadoop.yarn.OpportunisticContainersStatusProto opportunistic_containers_status = 9;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder> 
          getOpportunisticContainersStatusFieldBuilder() {
        if (opportunisticContainersStatusBuilder_ == null) {
          opportunisticContainersStatusBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder>(
                  getOpportunisticContainersStatus(),
                  getParentForChildren(),
                  isClean());
          opportunisticContainersStatus_ = null;
        }
        return opportunisticContainersStatusBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeStatusProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeStatusProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeStatusProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeStatusProto>() {
      @java.lang.Override
      public NodeStatusProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new NodeStatusProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeStatusProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeStatusProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
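
  // Illustrative sketch (hand-written, not produced by protoc): a minimal
  // example of attaching an OpportunisticContainersStatusProto to a
  // NodeStatusProto.Builder via the merge/get accessors generated above.
  // NodeStatusProto.newBuilder() is assumed from the standard generated API;
  // all field values are arbitrary placeholders.
  private static void nodeStatusOpportunisticStatusExample() {
    OpportunisticContainersStatusProto opportStatus =
        OpportunisticContainersStatusProto.newBuilder()
            .setRunningOpportContainers(3)
            .setWaitQueueLength(5)
            .build();
    NodeStatusProto.Builder nodeStatus = NodeStatusProto.newBuilder();
    // mergeOpportunisticContainersStatus folds the supplied message into any
    // previously set value; nothing was set here, so it simply stores it.
    nodeStatus.mergeOpportunisticContainersStatus(opportStatus);
    assert nodeStatus.getOpportunisticContainersStatus()
        .getRunningOpportContainers() == 3;
  }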

  public interface OpportunisticContainersStatusProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.OpportunisticContainersStatusProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional int32 running_opport_containers = 1;
     */
    boolean hasRunningOpportContainers();
    /**
     * optional int32 running_opport_containers = 1;
     */
    int getRunningOpportContainers();

    /**
     * optional int64 opport_memory_used = 2;
     */
    boolean hasOpportMemoryUsed();
    /**
     * optional int64 opport_memory_used = 2;
     */
    long getOpportMemoryUsed();

    /**
     * optional int32 opport_cores_used = 3;
     */
    boolean hasOpportCoresUsed();
    /**
     * optional int32 opport_cores_used = 3;
     */
    int getOpportCoresUsed();

    /**
     * optional int32 queued_opport_containers = 4;
     */
    boolean hasQueuedOpportContainers();
    /**
     * optional int32 queued_opport_containers = 4;
     */
    int getQueuedOpportContainers();

    /**
     * optional int32 wait_queue_length = 5;
     */
    boolean hasWaitQueueLength();
    /**
     * optional int32 wait_queue_length = 5;
     */
    int getWaitQueueLength();

    /**
     * optional int32 estimated_queue_wait_time = 6;
     */
    boolean hasEstimatedQueueWaitTime();
    /**
     * optional int32 estimated_queue_wait_time = 6;
     */
    int getEstimatedQueueWaitTime();

    /**
     * optional int32 opport_queue_capacity = 7;
     */
    boolean hasOpportQueueCapacity();
    /**
     * optional int32 opport_queue_capacity = 7;
     */
    int getOpportQueueCapacity();
  }
  /**
   * Protobuf type {@code hadoop.yarn.OpportunisticContainersStatusProto}
   */
  public  static final class OpportunisticContainersStatusProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.OpportunisticContainersStatusProto)
      OpportunisticContainersStatusProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use OpportunisticContainersStatusProto.newBuilder() to construct.
    private OpportunisticContainersStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private OpportunisticContainersStatusProto() {
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private OpportunisticContainersStatusProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              bitField0_ |= 0x00000001;
              runningOpportContainers_ = input.readInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              opportMemoryUsed_ = input.readInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              opportCoresUsed_ = input.readInt32();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              queuedOpportContainers_ = input.readInt32();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              waitQueueLength_ = input.readInt32();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              estimatedQueueWaitTime_ = input.readInt32();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              opportQueueCapacity_ = input.readInt32();
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_OpportunisticContainersStatusProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_OpportunisticContainersStatusProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder.class);
    }

    private int bitField0_;
    public static final int RUNNING_OPPORT_CONTAINERS_FIELD_NUMBER = 1;
    private int runningOpportContainers_;
    /**
     * optional int32 running_opport_containers = 1;
     */
    public boolean hasRunningOpportContainers() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional int32 running_opport_containers = 1;
     */
    public int getRunningOpportContainers() {
      return runningOpportContainers_;
    }

    public static final int OPPORT_MEMORY_USED_FIELD_NUMBER = 2;
    private long opportMemoryUsed_;
    /**
     * optional int64 opport_memory_used = 2;
     */
    public boolean hasOpportMemoryUsed() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int64 opport_memory_used = 2;
     */
    public long getOpportMemoryUsed() {
      return opportMemoryUsed_;
    }

    public static final int OPPORT_CORES_USED_FIELD_NUMBER = 3;
    private int opportCoresUsed_;
    /**
     * optional int32 opport_cores_used = 3;
     */
    public boolean hasOpportCoresUsed() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional int32 opport_cores_used = 3;
     */
    public int getOpportCoresUsed() {
      return opportCoresUsed_;
    }

    public static final int QUEUED_OPPORT_CONTAINERS_FIELD_NUMBER = 4;
    private int queuedOpportContainers_;
    /**
     * optional int32 queued_opport_containers = 4;
     */
    public boolean hasQueuedOpportContainers() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional int32 queued_opport_containers = 4;
     */
    public int getQueuedOpportContainers() {
      return queuedOpportContainers_;
    }

    public static final int WAIT_QUEUE_LENGTH_FIELD_NUMBER = 5;
    private int waitQueueLength_;
    /**
     * optional int32 wait_queue_length = 5;
     */
    public boolean hasWaitQueueLength() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional int32 wait_queue_length = 5;
     */
    public int getWaitQueueLength() {
      return waitQueueLength_;
    }

    public static final int ESTIMATED_QUEUE_WAIT_TIME_FIELD_NUMBER = 6;
    private int estimatedQueueWaitTime_;
    /**
     * optional int32 estimated_queue_wait_time = 6;
     */
    public boolean hasEstimatedQueueWaitTime() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * optional int32 estimated_queue_wait_time = 6;
     */
    public int getEstimatedQueueWaitTime() {
      return estimatedQueueWaitTime_;
    }

    public static final int OPPORT_QUEUE_CAPACITY_FIELD_NUMBER = 7;
    private int opportQueueCapacity_;
    /**
     * optional int32 opport_queue_capacity = 7;
     */
    public boolean hasOpportQueueCapacity() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * optional int32 opport_queue_capacity = 7;
     */
    public int getOpportQueueCapacity() {
      return opportQueueCapacity_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, runningOpportContainers_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, opportMemoryUsed_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, opportCoresUsed_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt32(4, queuedOpportContainers_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt32(5, waitQueueLength_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt32(6, estimatedQueueWaitTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeInt32(7, opportQueueCapacity_);
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, runningOpportContainers_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, opportMemoryUsed_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, opportCoresUsed_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, queuedOpportContainers_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(5, waitQueueLength_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(6, estimatedQueueWaitTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(7, opportQueueCapacity_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto) obj;

      if (hasRunningOpportContainers() != other.hasRunningOpportContainers()) return false;
      if (hasRunningOpportContainers()) {
        if (getRunningOpportContainers()
            != other.getRunningOpportContainers()) return false;
      }
      if (hasOpportMemoryUsed() != other.hasOpportMemoryUsed()) return false;
      if (hasOpportMemoryUsed()) {
        if (getOpportMemoryUsed()
            != other.getOpportMemoryUsed()) return false;
      }
      if (hasOpportCoresUsed() != other.hasOpportCoresUsed()) return false;
      if (hasOpportCoresUsed()) {
        if (getOpportCoresUsed()
            != other.getOpportCoresUsed()) return false;
      }
      if (hasQueuedOpportContainers() != other.hasQueuedOpportContainers()) return false;
      if (hasQueuedOpportContainers()) {
        if (getQueuedOpportContainers()
            != other.getQueuedOpportContainers()) return false;
      }
      if (hasWaitQueueLength() != other.hasWaitQueueLength()) return false;
      if (hasWaitQueueLength()) {
        if (getWaitQueueLength()
            != other.getWaitQueueLength()) return false;
      }
      if (hasEstimatedQueueWaitTime() != other.hasEstimatedQueueWaitTime()) return false;
      if (hasEstimatedQueueWaitTime()) {
        if (getEstimatedQueueWaitTime()
            != other.getEstimatedQueueWaitTime()) return false;
      }
      if (hasOpportQueueCapacity() != other.hasOpportQueueCapacity()) return false;
      if (hasOpportQueueCapacity()) {
        if (getOpportQueueCapacity()
            != other.getOpportQueueCapacity()) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasRunningOpportContainers()) {
        hash = (37 * hash) + RUNNING_OPPORT_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getRunningOpportContainers();
      }
      if (hasOpportMemoryUsed()) {
        hash = (37 * hash) + OPPORT_MEMORY_USED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getOpportMemoryUsed());
      }
      if (hasOpportCoresUsed()) {
        hash = (37 * hash) + OPPORT_CORES_USED_FIELD_NUMBER;
        hash = (53 * hash) + getOpportCoresUsed();
      }
      if (hasQueuedOpportContainers()) {
        hash = (37 * hash) + QUEUED_OPPORT_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getQueuedOpportContainers();
      }
      if (hasWaitQueueLength()) {
        hash = (37 * hash) + WAIT_QUEUE_LENGTH_FIELD_NUMBER;
        hash = (53 * hash) + getWaitQueueLength();
      }
      if (hasEstimatedQueueWaitTime()) {
        hash = (37 * hash) + ESTIMATED_QUEUE_WAIT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + getEstimatedQueueWaitTime();
      }
      if (hasOpportQueueCapacity()) {
        hash = (37 * hash) + OPPORT_QUEUE_CAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + getOpportQueueCapacity();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.OpportunisticContainersStatusProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.OpportunisticContainersStatusProto)
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_OpportunisticContainersStatusProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_OpportunisticContainersStatusProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        runningOpportContainers_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        opportMemoryUsed_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        opportCoresUsed_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        queuedOpportContainers_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        waitQueueLength_ = 0;
        bitField0_ = (bitField0_ & ~0x00000010);
        estimatedQueueWaitTime_ = 0;
        bitField0_ = (bitField0_ & ~0x00000020);
        opportQueueCapacity_ = 0;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_OpportunisticContainersStatusProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto build() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.runningOpportContainers_ = runningOpportContainers_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.opportMemoryUsed_ = opportMemoryUsed_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.opportCoresUsed_ = opportCoresUsed_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.queuedOpportContainers_ = queuedOpportContainers_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.waitQueueLength_ = waitQueueLength_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.estimatedQueueWaitTime_ = estimatedQueueWaitTime_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.opportQueueCapacity_ = opportQueueCapacity_;
          to_bitField0_ |= 0x00000040;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto.getDefaultInstance()) return this;
        if (other.hasRunningOpportContainers()) {
          setRunningOpportContainers(other.getRunningOpportContainers());
        }
        if (other.hasOpportMemoryUsed()) {
          setOpportMemoryUsed(other.getOpportMemoryUsed());
        }
        if (other.hasOpportCoresUsed()) {
          setOpportCoresUsed(other.getOpportCoresUsed());
        }
        if (other.hasQueuedOpportContainers()) {
          setQueuedOpportContainers(other.getQueuedOpportContainers());
        }
        if (other.hasWaitQueueLength()) {
          setWaitQueueLength(other.getWaitQueueLength());
        }
        if (other.hasEstimatedQueueWaitTime()) {
          setEstimatedQueueWaitTime(other.getEstimatedQueueWaitTime());
        }
        if (other.hasOpportQueueCapacity()) {
          setOpportQueueCapacity(other.getOpportQueueCapacity());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private int runningOpportContainers_ ;
      /**
       * optional int32 running_opport_containers = 1;
       */
      public boolean hasRunningOpportContainers() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional int32 running_opport_containers = 1;
       */
      public int getRunningOpportContainers() {
        return runningOpportContainers_;
      }
      /**
       * optional int32 running_opport_containers = 1;
       */
      public Builder setRunningOpportContainers(int value) {
        bitField0_ |= 0x00000001;
        runningOpportContainers_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 running_opport_containers = 1;
       */
      public Builder clearRunningOpportContainers() {
        bitField0_ = (bitField0_ & ~0x00000001);
        runningOpportContainers_ = 0;
        onChanged();
        return this;
      }

      private long opportMemoryUsed_ ;
      /**
       * optional int64 opport_memory_used = 2;
       */
      public boolean hasOpportMemoryUsed() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int64 opport_memory_used = 2;
       */
      public long getOpportMemoryUsed() {
        return opportMemoryUsed_;
      }
      /**
       * optional int64 opport_memory_used = 2;
       */
      public Builder setOpportMemoryUsed(long value) {
        bitField0_ |= 0x00000002;
        opportMemoryUsed_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 opport_memory_used = 2;
       */
      public Builder clearOpportMemoryUsed() {
        bitField0_ = (bitField0_ & ~0x00000002);
        opportMemoryUsed_ = 0L;
        onChanged();
        return this;
      }

      private int opportCoresUsed_ ;
      /**
       * optional int32 opport_cores_used = 3;
       */
      public boolean hasOpportCoresUsed() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional int32 opport_cores_used = 3;
       */
      public int getOpportCoresUsed() {
        return opportCoresUsed_;
      }
      /**
       * optional int32 opport_cores_used = 3;
       */
      public Builder setOpportCoresUsed(int value) {
        bitField0_ |= 0x00000004;
        opportCoresUsed_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 opport_cores_used = 3;
       */
      public Builder clearOpportCoresUsed() {
        bitField0_ = (bitField0_ & ~0x00000004);
        opportCoresUsed_ = 0;
        onChanged();
        return this;
      }

      private int queuedOpportContainers_ ;
      /**
       * optional int32 queued_opport_containers = 4;
       */
      public boolean hasQueuedOpportContainers() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional int32 queued_opport_containers = 4;
       */
      public int getQueuedOpportContainers() {
        return queuedOpportContainers_;
      }
      /**
       * optional int32 queued_opport_containers = 4;
       */
      public Builder setQueuedOpportContainers(int value) {
        bitField0_ |= 0x00000008;
        queuedOpportContainers_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 queued_opport_containers = 4;
       */
      public Builder clearQueuedOpportContainers() {
        bitField0_ = (bitField0_ & ~0x00000008);
        queuedOpportContainers_ = 0;
        onChanged();
        return this;
      }

      private int waitQueueLength_ ;
      /**
       * optional int32 wait_queue_length = 5;
       */
      public boolean hasWaitQueueLength() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional int32 wait_queue_length = 5;
       */
      public int getWaitQueueLength() {
        return waitQueueLength_;
      }
      /**
       * optional int32 wait_queue_length = 5;
       */
      public Builder setWaitQueueLength(int value) {
        bitField0_ |= 0x00000010;
        waitQueueLength_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 wait_queue_length = 5;
       */
      public Builder clearWaitQueueLength() {
        bitField0_ = (bitField0_ & ~0x00000010);
        waitQueueLength_ = 0;
        onChanged();
        return this;
      }

      private int estimatedQueueWaitTime_ ;
      /**
       * optional int32 estimated_queue_wait_time = 6;
       */
      public boolean hasEstimatedQueueWaitTime() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * optional int32 estimated_queue_wait_time = 6;
       */
      public int getEstimatedQueueWaitTime() {
        return estimatedQueueWaitTime_;
      }
      /**
       * optional int32 estimated_queue_wait_time = 6;
       */
      public Builder setEstimatedQueueWaitTime(int value) {
        bitField0_ |= 0x00000020;
        estimatedQueueWaitTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 estimated_queue_wait_time = 6;
       */
      public Builder clearEstimatedQueueWaitTime() {
        bitField0_ = (bitField0_ & ~0x00000020);
        estimatedQueueWaitTime_ = 0;
        onChanged();
        return this;
      }

      private int opportQueueCapacity_ ;
      /**
       * optional int32 opport_queue_capacity = 7;
       */
      public boolean hasOpportQueueCapacity() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional int32 opport_queue_capacity = 7;
       */
      public int getOpportQueueCapacity() {
        return opportQueueCapacity_;
      }
      /**
       * optional int32 opport_queue_capacity = 7;
       */
      public Builder setOpportQueueCapacity(int value) {
        bitField0_ |= 0x00000040;
        opportQueueCapacity_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 opport_queue_capacity = 7;
       */
      public Builder clearOpportQueueCapacity() {
        bitField0_ = (bitField0_ & ~0x00000040);
        opportQueueCapacity_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.OpportunisticContainersStatusProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.OpportunisticContainersStatusProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<OpportunisticContainersStatusProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<OpportunisticContainersStatusProto>() {
      @java.lang.Override
      public OpportunisticContainersStatusProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new OpportunisticContainersStatusProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<OpportunisticContainersStatusProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<OpportunisticContainersStatusProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.OpportunisticContainersStatusProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
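
  // Illustrative sketch (hand-written, not produced by protoc): round-tripping
  // an OpportunisticContainersStatusProto through its wire form using the
  // generated builder, toByteArray() and parseFrom(byte[]) defined above.
  // Field values are arbitrary placeholders.
  private static void opportunisticContainersStatusRoundTripExample()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    OpportunisticContainersStatusProto status =
        OpportunisticContainersStatusProto.newBuilder()
            .setRunningOpportContainers(4)
            .setOpportMemoryUsed(2048L)
            .setOpportCoresUsed(2)
            .setEstimatedQueueWaitTime(150)
            .build();
    // Only fields whose bit is set in bitField0_ are written by writeTo(), so
    // unset optional fields contribute nothing to the serialized size.
    byte[] wire = status.toByteArray();
    OpportunisticContainersStatusProto parsed =
        OpportunisticContainersStatusProto.parseFrom(wire);
    assert parsed.hasOpportMemoryUsed() && parsed.getOpportMemoryUsed() == 2048L;
    assert !parsed.hasWaitQueueLength();
  }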

  public interface MasterKeyProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.MasterKeyProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional int32 key_id = 1;
     */
    boolean hasKeyId();
    /**
     * optional int32 key_id = 1;
     */
    int getKeyId();

    /**
     * optional bytes bytes = 2;
     */
    boolean hasBytes();
    /**
     * optional bytes bytes = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getBytes();
  }
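
  // Illustrative sketch (hand-written, not produced by protoc): constructing a
  // MasterKeyProto, defined below, from a key id and raw key material. The
  // literal key bytes are an arbitrary placeholder, and copyFromUtf8 is assumed
  // to be available on the shaded ByteString as in stock protobuf.
  private static MasterKeyProto masterKeyExample() {
    return MasterKeyProto.newBuilder()
        .setKeyId(42)
        // setBytes rejects null; an absent key should simply be left unset.
        .setBytes(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
            "placeholder-key-material"))
        .build();
  }
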
  /**
   * Protobuf type {@code hadoop.yarn.MasterKeyProto}
   */
  public  static final class MasterKeyProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.MasterKeyProto)
      MasterKeyProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use MasterKeyProto.newBuilder() to construct.
    private MasterKeyProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private MasterKeyProto() {
      bytes_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private MasterKeyProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              bitField0_ |= 0x00000001;
              keyId_ = input.readInt32();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              bytes_ = input.readBytes();
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_MasterKeyProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_MasterKeyProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder.class);
    }

    private int bitField0_;
    public static final int KEY_ID_FIELD_NUMBER = 1;
    private int keyId_;
    /**
     * optional int32 key_id = 1;
     */
    public boolean hasKeyId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional int32 key_id = 1;
     */
    public int getKeyId() {
      return keyId_;
    }

    public static final int BYTES_FIELD_NUMBER = 2;
    private org.apache.hadoop.thirdparty.protobuf.ByteString bytes_;
    /**
     * optional bytes bytes = 2;
     */
    public boolean hasBytes() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional bytes bytes = 2;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString getBytes() {
      return bytes_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, keyId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeBytes(2, bytes_);
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, keyId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBytesSize(2, bytes_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto) obj;

      if (hasKeyId() != other.hasKeyId()) return false;
      if (hasKeyId()) {
        if (getKeyId()
            != other.getKeyId()) return false;
      }
      if (hasBytes() != other.hasBytes()) return false;
      if (hasBytes()) {
        if (!getBytes()
            .equals(other.getBytes())) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKeyId()) {
        hash = (37 * hash) + KEY_ID_FIELD_NUMBER;
        hash = (53 * hash) + getKeyId();
      }
      if (hasBytes()) {
        hash = (37 * hash) + BYTES_FIELD_NUMBER;
        hash = (53 * hash) + getBytes().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.MasterKeyProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.MasterKeyProto)
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_MasterKeyProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_MasterKeyProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        keyId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        bytes_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_MasterKeyProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto build() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.keyId_ = keyId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        result.bytes_ = bytes_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) return this;
        if (other.hasKeyId()) {
          setKeyId(other.getKeyId());
        }
        if (other.hasBytes()) {
          setBytes(other.getBytes());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private int keyId_ ;
      /**
       * optional int32 key_id = 1;
       */
      public boolean hasKeyId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional int32 key_id = 1;
       */
      public int getKeyId() {
        return keyId_;
      }
      /**
       * optional int32 key_id = 1;
       */
      public Builder setKeyId(int value) {
        bitField0_ |= 0x00000001;
        keyId_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 key_id = 1;
       */
      public Builder clearKeyId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        keyId_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.ByteString bytes_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      /**
       * optional bytes bytes = 2;
       */
      public boolean hasBytes() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional bytes bytes = 2;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString getBytes() {
        return bytes_;
      }
      /**
       * optional bytes bytes = 2;
       */
      public Builder setBytes(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        bytes_ = value;
        onChanged();
        return this;
      }
      /**
       * optional bytes bytes = 2;
       */
      public Builder clearBytes() {
        bitField0_ = (bitField0_ & ~0x00000002);
        bytes_ = getDefaultInstance().getBytes();
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.MasterKeyProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.MasterKeyProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<MasterKeyProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<MasterKeyProto>() {
      @java.lang.Override
      public MasterKeyProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new MasterKeyProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<MasterKeyProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<MasterKeyProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
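  // Editor's note (not part of the generated source): a minimal usage sketch for
  // MasterKeyProto, assuming only the generated API shown in this class; the field
  // values are illustrative.
  //
  //   YarnServerCommonProtos.MasterKeyProto key =
  //       YarnServerCommonProtos.MasterKeyProto.newBuilder()
  //           .setKeyId(1)
  //           .setBytes(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("secret"))
  //           .build();
  //   byte[] wire = key.toByteArray();
  //   YarnServerCommonProtos.MasterKeyProto parsed =
  //       YarnServerCommonProtos.MasterKeyProto.parseFrom(wire);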

  public interface NodeHealthStatusProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeHealthStatusProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional bool is_node_healthy = 1;
     */
    boolean hasIsNodeHealthy();
    /**
     * optional bool is_node_healthy = 1;
     */
    boolean getIsNodeHealthy();

    /**
     * optional string health_report = 2;
     */
    boolean hasHealthReport();
    /**
     * optional string health_report = 2;
     */
    java.lang.String getHealthReport();
    /**
     * optional string health_report = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHealthReportBytes();

    /**
     * optional int64 last_health_report_time = 3;
     */
    boolean hasLastHealthReportTime();
    /**
     * optional int64 last_health_report_time = 3;
     */
    long getLastHealthReportTime();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeHealthStatusProto}
   */
  public  static final class NodeHealthStatusProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeHealthStatusProto)
      NodeHealthStatusProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use NodeHealthStatusProto.newBuilder() to construct.
    private NodeHealthStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeHealthStatusProto() {
      healthReport_ = "";
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
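    // The parsing constructor below dispatches on protobuf wire tags, where
    // tag = (field_number << 3) | wire_type: 8 is field 1 (varint bool
    // is_node_healthy), 18 is field 2 (length-delimited health_report), and
    // 24 is field 3 (varint int64 last_health_report_time); unrecognized tags
    // are preserved in unknownFields.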
    private NodeHealthStatusProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              bitField0_ |= 0x00000001;
              isNodeHealthy_ = input.readBool();
              break;
            }
            case 18: {
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              healthReport_ = bs;
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              lastHealthReportTime_ = input.readInt64();
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeHealthStatusProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeHealthStatusProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder.class);
    }

    private int bitField0_;
    public static final int IS_NODE_HEALTHY_FIELD_NUMBER = 1;
    private boolean isNodeHealthy_;
    /**
     * optional bool is_node_healthy = 1;
     */
    public boolean hasIsNodeHealthy() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional bool is_node_healthy = 1;
     */
    public boolean getIsNodeHealthy() {
      return isNodeHealthy_;
    }

    public static final int HEALTH_REPORT_FIELD_NUMBER = 2;
    private volatile java.lang.Object healthReport_;
    /**
     * optional string health_report = 2;
     */
    public boolean hasHealthReport() {
      return ((bitField0_ & 0x00000002) != 0);
    }
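    // getHealthReport() lazily decodes the stored ByteString to a String and, when
    // the bytes are valid UTF-8, caches the decoded value back into healthReport_;
    // getHealthReportBytes() performs the reverse conversion and caches the ByteString.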
    /**
     * optional string health_report = 2;
     */
    public java.lang.String getHealthReport() {
      java.lang.Object ref = healthReport_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          healthReport_ = s;
        }
        return s;
      }
    }
    /**
     * optional string health_report = 2;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHealthReportBytes() {
      java.lang.Object ref = healthReport_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        healthReport_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int LAST_HEALTH_REPORT_TIME_FIELD_NUMBER = 3;
    private long lastHealthReportTime_;
    /**
     * optional int64 last_health_report_time = 3;
     */
    public boolean hasLastHealthReportTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional int64 last_health_report_time = 3;
     */
    public long getLastHealthReportTime() {
      return lastHealthReportTime_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeBool(1, isNodeHealthy_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, healthReport_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, lastHealthReportTime_);
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(1, isNodeHealthy_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, healthReport_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, lastHealthReportTime_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto) obj;

      if (hasIsNodeHealthy() != other.hasIsNodeHealthy()) return false;
      if (hasIsNodeHealthy()) {
        if (getIsNodeHealthy()
            != other.getIsNodeHealthy()) return false;
      }
      if (hasHealthReport() != other.hasHealthReport()) return false;
      if (hasHealthReport()) {
        if (!getHealthReport()
            .equals(other.getHealthReport())) return false;
      }
      if (hasLastHealthReportTime() != other.hasLastHealthReportTime()) return false;
      if (hasLastHealthReportTime()) {
        if (getLastHealthReportTime()
            != other.getLastHealthReportTime()) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasIsNodeHealthy()) {
        hash = (37 * hash) + IS_NODE_HEALTHY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIsNodeHealthy());
      }
      if (hasHealthReport()) {
        hash = (37 * hash) + HEALTH_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getHealthReport().hashCode();
      }
      if (hasLastHealthReportTime()) {
        hash = (37 * hash) + LAST_HEALTH_REPORT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getLastHealthReportTime());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeHealthStatusProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeHealthStatusProto)
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeHealthStatusProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeHealthStatusProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        isNodeHealthy_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        healthReport_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        lastHealthReportTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_NodeHealthStatusProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto build() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
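      // buildPartial() below copies each field into the result message and mirrors
      // the builder's presence bits (bitField0_) onto it, so hasXxx() on the built
      // message reflects which setters were called; healthReport_ is copied
      // unconditionally, while its presence bit is set only when the field was set.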

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.isNodeHealthy_ = isNodeHealthy_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        result.healthReport_ = healthReport_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.lastHealthReportTime_ = lastHealthReportTime_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto.getDefaultInstance()) return this;
        if (other.hasIsNodeHealthy()) {
          setIsNodeHealthy(other.getIsNodeHealthy());
        }
        if (other.hasHealthReport()) {
          bitField0_ |= 0x00000002;
          healthReport_ = other.healthReport_;
          onChanged();
        }
        if (other.hasLastHealthReportTime()) {
          setLastHealthReportTime(other.getLastHealthReportTime());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private boolean isNodeHealthy_ ;
      /**
       * optional bool is_node_healthy = 1;
       */
      public boolean hasIsNodeHealthy() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional bool is_node_healthy = 1;
       */
      public boolean getIsNodeHealthy() {
        return isNodeHealthy_;
      }
      /**
       * optional bool is_node_healthy = 1;
       */
      public Builder setIsNodeHealthy(boolean value) {
        bitField0_ |= 0x00000001;
        isNodeHealthy_ = value;
        onChanged();
        return this;
      }
      /**
       * optional bool is_node_healthy = 1;
       */
      public Builder clearIsNodeHealthy() {
        bitField0_ = (bitField0_ & ~0x00000001);
        isNodeHealthy_ = false;
        onChanged();
        return this;
      }

      private java.lang.Object healthReport_ = "";
      /**
       * optional string health_report = 2;
       */
      public boolean hasHealthReport() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional string health_report = 2;
       */
      public java.lang.String getHealthReport() {
        java.lang.Object ref = healthReport_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            healthReport_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string health_report = 2;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHealthReportBytes() {
        java.lang.Object ref = healthReport_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          healthReport_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string health_report = 2;
       */
      public Builder setHealthReport(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        healthReport_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string health_report = 2;
       */
      public Builder clearHealthReport() {
        bitField0_ = (bitField0_ & ~0x00000002);
        healthReport_ = getDefaultInstance().getHealthReport();
        onChanged();
        return this;
      }
      /**
       * optional string health_report = 2;
       */
      public Builder setHealthReportBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        healthReport_ = value;
        onChanged();
        return this;
      }

      private long lastHealthReportTime_ ;
      /**
       * optional int64 last_health_report_time = 3;
       */
      public boolean hasLastHealthReportTime() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional int64 last_health_report_time = 3;
       */
      public long getLastHealthReportTime() {
        return lastHealthReportTime_;
      }
      /**
       * optional int64 last_health_report_time = 3;
       */
      public Builder setLastHealthReportTime(long value) {
        bitField0_ |= 0x00000004;
        lastHealthReportTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 last_health_report_time = 3;
       */
      public Builder clearLastHealthReportTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        lastHealthReportTime_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeHealthStatusProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeHealthStatusProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
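    // PARSER is kept (deprecated) for source compatibility; parser() and
    // getParserForType() below are the supported accessors and return the same instance.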

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeHealthStatusProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeHealthStatusProto>() {
      @java.lang.Override
      public NodeHealthStatusProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new NodeHealthStatusProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeHealthStatusProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeHealthStatusProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeHealthStatusProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
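  // Editor's note (not part of the generated source): a minimal sketch of building
  // and inspecting a NodeHealthStatusProto with the API above; the report text and
  // timestamp are illustrative.
  //
  //   YarnServerCommonProtos.NodeHealthStatusProto health =
  //       YarnServerCommonProtos.NodeHealthStatusProto.newBuilder()
  //           .setIsNodeHealthy(true)
  //           .setHealthReport("disks ok")
  //           .setLastHealthReportTime(System.currentTimeMillis())
  //           .build();
  //   boolean healthy = health.hasIsNodeHealthy() && health.getIsNodeHealthy();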

  public interface VersionProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.VersionProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional int32 major_version = 1;
     */
    boolean hasMajorVersion();
    /**
     * optional int32 major_version = 1;
     */
    int getMajorVersion();

    /**
     * optional int32 minor_version = 2;
     */
    boolean hasMinorVersion();
    /**
     * optional int32 minor_version = 2;
     */
    int getMinorVersion();
  }
  /**
   * Protobuf type {@code hadoop.yarn.VersionProto}
   */
  public  static final class VersionProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.VersionProto)
      VersionProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use VersionProto.newBuilder() to construct.
    private VersionProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private VersionProto() {
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private VersionProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              bitField0_ |= 0x00000001;
              majorVersion_ = input.readInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              minorVersion_ = input.readInt32();
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_VersionProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_VersionProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.Builder.class);
    }

    private int bitField0_;
    public static final int MAJOR_VERSION_FIELD_NUMBER = 1;
    private int majorVersion_;
    /**
     * optional int32 major_version = 1;
     */
    public boolean hasMajorVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional int32 major_version = 1;
     */
    public int getMajorVersion() {
      return majorVersion_;
    }

    public static final int MINOR_VERSION_FIELD_NUMBER = 2;
    private int minorVersion_;
    /**
     * optional int32 minor_version = 2;
     */
    public boolean hasMinorVersion() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int32 minor_version = 2;
     */
    public int getMinorVersion() {
      return minorVersion_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, majorVersion_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, minorVersion_);
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, majorVersion_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, minorVersion_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto) obj;

      if (hasMajorVersion() != other.hasMajorVersion()) return false;
      if (hasMajorVersion()) {
        if (getMajorVersion()
            != other.getMajorVersion()) return false;
      }
      if (hasMinorVersion() != other.hasMinorVersion()) return false;
      if (hasMinorVersion()) {
        if (getMinorVersion()
            != other.getMinorVersion()) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasMajorVersion()) {
        hash = (37 * hash) + MAJOR_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getMajorVersion();
      }
      if (hasMinorVersion()) {
        hash = (37 * hash) + MINOR_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getMinorVersion();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.VersionProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.VersionProto)
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_VersionProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_VersionProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        majorVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        minorVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.internal_static_hadoop_yarn_VersionProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto build() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.majorVersion_ = majorVersion_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.minorVersion_ = minorVersion_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto.getDefaultInstance()) return this;
        if (other.hasMajorVersion()) {
          setMajorVersion(other.getMajorVersion());
        }
        if (other.hasMinorVersion()) {
          setMinorVersion(other.getMinorVersion());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private int majorVersion_ ;
      /**
       * optional int32 major_version = 1;
       */
      public boolean hasMajorVersion() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional int32 major_version = 1;
       */
      public int getMajorVersion() {
        return majorVersion_;
      }
      /**
       * optional int32 major_version = 1;
       */
      public Builder setMajorVersion(int value) {
        bitField0_ |= 0x00000001;
        majorVersion_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 major_version = 1;
       */
      public Builder clearMajorVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        majorVersion_ = 0;
        onChanged();
        return this;
      }

      private int minorVersion_ ;
      /**
       * optional int32 minor_version = 2;
       */
      public boolean hasMinorVersion() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int32 minor_version = 2;
       */
      public int getMinorVersion() {
        return minorVersion_;
      }
      /**
       * optional int32 minor_version = 2;
       */
      public Builder setMinorVersion(int value) {
        bitField0_ |= 0x00000002;
        minorVersion_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 minor_version = 2;
       */
      public Builder clearMinorVersion() {
        bitField0_ = (bitField0_ & ~0x00000002);
        minorVersion_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.VersionProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.VersionProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<VersionProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<VersionProto>() {
      @java.lang.Override
      public VersionProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new VersionProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<VersionProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<VersionProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
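  // Editor's note (not part of the generated source): a hedged sketch of how a
  // caller might build a VersionProto and gate on the major version; the
  // "same major version is compatible" policy is an assumption, not defined here.
  //
  //   YarnServerCommonProtos.VersionProto version =
  //       YarnServerCommonProtos.VersionProto.newBuilder()
  //           .setMajorVersion(1)
  //           .setMinorVersion(2)
  //           .build();
  //   boolean compatible = version.hasMajorVersion() && version.getMajorVersion() == 1;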

  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeStatusProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeStatusProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_OpportunisticContainersStatusProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_OpportunisticContainersStatusProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_MasterKeyProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_MasterKeyProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeHealthStatusProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeHealthStatusProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_VersionProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_VersionProto_fieldAccessorTable;

  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
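  // Editor note (not part of the protoc output): the static initializer below
  // rebuilds the FileDescriptor for yarn_server_common_protos.proto from its
  // serialized descriptorData and then fills in the per-message descriptor and
  // FieldAccessorTable fields declared above.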
  static {
    java.lang.String[] descriptorData = {
      "\n\037yarn_server_common_protos.proto\022\013hadoo" +
      "p.yarn\032\021yarn_protos.proto\"\255\004\n\017NodeStatus" +
      "Proto\022)\n\007node_id\030\001 \001(\0132\030.hadoop.yarn.Nod" +
      "eIdProto\022\023\n\013response_id\030\002 \001(\005\022=\n\022contain" +
      "ersStatuses\030\003 \003(\0132!.hadoop.yarn.Containe" +
      "rStatusProto\022<\n\020nodeHealthStatus\030\004 \001(\0132\"" +
      ".hadoop.yarn.NodeHealthStatusProto\022@\n\027ke" +
      "ep_alive_applications\030\005 \003(\0132\037.hadoop.yar" +
      "n.ApplicationIdProto\022E\n\026containers_utili" +
      "zation\030\006 \001(\0132%.hadoop.yarn.ResourceUtili" +
      "zationProto\022?\n\020node_utilization\030\007 \001(\0132%." +
      "hadoop.yarn.ResourceUtilizationProto\0229\n\024" +
      "increased_containers\030\010 \003(\0132\033.hadoop.yarn" +
      ".ContainerProto\022X\n\037opportunistic_contain" +
      "ers_status\030\t \001(\0132/.hadoop.yarn.Opportuni" +
      "sticContainersStatusProto\"\375\001\n\"Opportunis" +
      "ticContainersStatusProto\022!\n\031running_oppo" +
      "rt_containers\030\001 \001(\005\022\032\n\022opport_memory_use" +
      "d\030\002 \001(\003\022\031\n\021opport_cores_used\030\003 \001(\005\022 \n\030qu" +
      "eued_opport_containers\030\004 \001(\005\022\031\n\021wait_que" +
      "ue_length\030\005 \001(\005\022!\n\031estimated_queue_wait_" +
      "time\030\006 \001(\005\022\035\n\025opport_queue_capacity\030\007 \001(" +
      "\005\"/\n\016MasterKeyProto\022\016\n\006key_id\030\001 \001(\005\022\r\n\005b" +
      "ytes\030\002 \001(\014\"h\n\025NodeHealthStatusProto\022\027\n\017i" +
      "s_node_healthy\030\001 \001(\010\022\025\n\rhealth_report\030\002 " +
      "\001(\t\022\037\n\027last_health_report_time\030\003 \001(\003\"<\n\014" +
      "VersionProto\022\025\n\rmajor_version\030\001 \001(\005\022\025\n\rm" +
      "inor_version\030\002 \001(\005*7\n\017NodeActionProto\022\n\n" +
      "\006NORMAL\020\000\022\n\n\006RESYNC\020\001\022\014\n\010SHUTDOWN\020\002B<\n\034o" +
      "rg.apache.hadoop.yarn.protoB\026YarnServerC" +
      "ommonProtos\210\001\001\240\001\001"
    };
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
          public org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        }, assigner);
    internal_static_hadoop_yarn_NodeStatusProto_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_hadoop_yarn_NodeStatusProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeStatusProto_descriptor,
        new java.lang.String[] { "NodeId", "ResponseId", "ContainersStatuses", "NodeHealthStatus", "KeepAliveApplications", "ContainersUtilization", "NodeUtilization", "IncreasedContainers", "OpportunisticContainersStatus", });
    internal_static_hadoop_yarn_OpportunisticContainersStatusProto_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_hadoop_yarn_OpportunisticContainersStatusProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_OpportunisticContainersStatusProto_descriptor,
        new java.lang.String[] { "RunningOpportContainers", "OpportMemoryUsed", "OpportCoresUsed", "QueuedOpportContainers", "WaitQueueLength", "EstimatedQueueWaitTime", "OpportQueueCapacity", });
    internal_static_hadoop_yarn_MasterKeyProto_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_hadoop_yarn_MasterKeyProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_MasterKeyProto_descriptor,
        new java.lang.String[] { "KeyId", "Bytes", });
    internal_static_hadoop_yarn_NodeHealthStatusProto_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_hadoop_yarn_NodeHealthStatusProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeHealthStatusProto_descriptor,
        new java.lang.String[] { "IsNodeHealthy", "HealthReport", "LastHealthReportTime", });
    internal_static_hadoop_yarn_VersionProto_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_hadoop_yarn_VersionProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_VersionProto_descriptor,
        new java.lang.String[] { "MajorVersion", "MinorVersion", });
    org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
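
A minimal standalone sketch (editor-added, not part of the generated file) showing how the FileDescriptor assembled in the static initializer above can be inspected at runtime. The class name DescriptorInspection is hypothetical; the descriptor calls used (getDescriptor, getMessageTypes, getFullName) are the standard shaded protobuf APIs already referenced elsewhere in this file.

import org.apache.hadoop.thirdparty.protobuf.Descriptors;

public class DescriptorInspection {
  public static void main(String[] args) {
    // The generated holder class exposes the parsed FileDescriptor.
    Descriptors.FileDescriptor file =
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.getDescriptor();
    // Print every message type declared in yarn_server_common_protos.proto:
    // NodeStatusProto, OpportunisticContainersStatusProto, MasterKeyProto,
    // NodeHealthStatusProto and VersionProto.
    for (Descriptors.Descriptor message : file.getMessageTypes()) {
      System.out.println(message.getFullName());
    }
  }
}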



